@@ -1745,9 +1745,11 @@ set_edge_can_fallthru_flag (void)
continue;
if (!any_condjump_p (BB_END (bb)))
continue;
- if (!invert_jump (BB_END (bb), JUMP_LABEL (BB_END (bb)), 0))
+
+ rtx_jump_insn *bb_end_jump = as_a <rtx_jump_insn *> (BB_END (bb));
+ if (!invert_jump (bb_end_jump, JUMP_LABEL (bb_end_jump), 0))
continue;
- invert_jump (BB_END (bb), JUMP_LABEL (BB_END (bb)), 0);
+ invert_jump (bb_end_jump, JUMP_LABEL (bb_end_jump), 0);
EDGE_SUCC (bb, 0)->flags |= EDGE_CAN_FALLTHRU;
EDGE_SUCC (bb, 1)->flags |= EDGE_CAN_FALLTHRU;
}
@@ -1902,9 +1904,15 @@ fix_up_fall_thru_edges (void)
fall_thru_label = block_label (fall_thru->dest);
- if (old_jump && JUMP_P (old_jump) && fall_thru_label)
- invert_worked = invert_jump (old_jump,
- fall_thru_label,0);
+ if (old_jump && fall_thru_label)
+ {
+ rtx_jump_insn *old_jump_insn =
+ dyn_cast <rtx_jump_insn *> (old_jump);
+ if (old_jump_insn)
+ invert_worked = invert_jump (old_jump_insn,
+ fall_thru_label, 0);
+ }
+
if (invert_worked)
{
fall_thru->flags &= ~EDGE_FALLTHRU;
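
The two hunks above show both halves of the checked-cast idiom used throughout this patch: as_a <rtx_jump_insn *> where an earlier test (any_condjump_p) already guarantees a jump, and dyn_cast <rtx_jump_insn *> where the insn may legitimately be something else, taking over the old JUMP_P check. A minimal stand-alone sketch of the contract, loosely modeled on GCC's is-a.h (stand-in types, not the real headers):

    #include <cassert>

    struct rtx_insn { int code; };            /* stand-ins for the RTL types */
    struct rtx_jump_insn : rtx_insn {};
    const int JUMP_INSN = 1;

    template <typename T> bool is_a_p (const rtx_insn *);
    template <> bool is_a_p<rtx_jump_insn *> (const rtx_insn *insn)
    { return insn->code == JUMP_INSN; }

    /* as_a: the caller vouches for the dynamic type; a wrong call trips
       the assert instead of silently miscasting.  */
    template <typename T> T as_a (rtx_insn *insn)
    { assert (is_a_p<T> (insn)); return static_cast<T> (insn); }

    /* dyn_cast: returns null on mismatch, for "might not be a jump" sites.  */
    template <typename T> T dyn_cast (rtx_insn *insn)
    { return is_a_p<T> (insn) ? static_cast<T> (insn) : nullptr; }

    int main ()
    {
      rtx_jump_insn j; j.code = JUMP_INSN;
      rtx_insn *bb_end = &j;
      rtx_jump_insn *jump = as_a<rtx_jump_insn *> (bb_end);
      if (rtx_jump_insn *maybe = dyn_cast<rtx_jump_insn *> (bb_end))
        (void) maybe;        /* typed pointer scoped to the success branch */
      (void) jump;
    }

The real templates dispatch through is_a_helper<T>::test and cover the whole rtx hierarchy; the sketch only shows the guarantee the converted call sites rely on.
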
@@ -2021,10 +2029,9 @@ fix_crossing_conditional_branches (void)
edge succ2;
edge crossing_edge;
edge new_edge;
- rtx_insn *old_jump;
rtx set_src;
rtx old_label = NULL_RTX;
- rtx new_label;
+ rtx_code_label *new_label;
FOR_EACH_BB_FN (cur_bb, cfun)
{
@@ -2049,7 +2056,7 @@ fix_crossing_conditional_branches (void)
if (crossing_edge)
{
- old_jump = BB_END (cur_bb);
+ rtx_jump_insn *old_jump = as_a <rtx_jump_insn *> (BB_END (cur_bb));
/* Check to make sure the jump instruction is a
conditional jump. */
@@ -2088,7 +2095,8 @@ fix_crossing_conditional_branches (void)
else
{
basic_block last_bb;
- rtx_insn *new_jump;
+ rtx_insn *old_label_insn;
+ rtx_jump_insn *new_jump;
/* Create new basic block to be dest for
conditional jump. */
@@ -2099,9 +2107,10 @@ fix_crossing_conditional_branches (void)
emit_label (new_label);
gcc_assert (GET_CODE (old_label) == LABEL_REF);
- old_label = JUMP_LABEL (old_jump);
- new_jump = emit_jump_insn (gen_jump (old_label));
- JUMP_LABEL (new_jump) = old_label;
+ old_label_insn = old_jump->jump_target ();
+ new_jump = as_a <rtx_jump_insn *>
+ (emit_jump_insn (gen_jump (old_label_insn)));
+ new_jump->set_jump_target (old_label_insn);
last_bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
new_bb = create_basic_block (new_label, new_jump, last_bb);
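
The jump_target ()/set_jump_target () calls above replace raw reads and writes of the JUMP_LABEL field with member functions on rtx_jump_insn. A sketch of the assumed shape (stand-in types; the real declarations live in rtl.h):

    struct rtx_insn {};
    struct rtx_code_label : rtx_insn {};

    struct rtx_jump_insn : rtx_insn
    {
      rtx_code_label *label_slot;        /* what JUMP_LABEL used to expose */

      /* Typed load and store of the jump target, so call sites stop
         passing bare rtx around.  */
      rtx_code_label *jump_target () const { return label_slot; }
      void set_jump_target (rtx_code_label *label) { label_slot = label; }
    };

    int main ()
    {
      rtx_code_label lab;
      rtx_jump_insn jump;
      jump.set_jump_target (&lab);
      rtx_insn *old_label_insn = jump.jump_target ();   /* widens fine */
      (void) old_label_insn;
    }
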
@@ -1212,7 +1212,7 @@ move_btr_def (basic_block new_def_bb, int btr, btr_def def, bitmap live_range,
btr_mode = GET_MODE (SET_DEST (set));
btr_rtx = gen_rtx_REG (btr_mode, btr);
- new_insn = as_a <rtx_insn *> (gen_move_insn (btr_rtx, src));
+ new_insn = gen_move_insn (btr_rtx, src);
/* Insert target register initialization at head of basic block. */
def->insn = emit_insn_after (new_insn, insp);
@@ -2001,7 +2001,7 @@ expand_errno_check (tree exp, rtx target)
/* Test the result; if it is NaN, set errno=EDOM because
the argument was not in the domain. */
do_compare_rtx_and_jump (target, target, EQ, 0, GET_MODE (target),
- NULL_RTX, NULL_RTX, lab,
+ NULL_RTX, NULL, lab,
/* The jump is very likely. */
REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1));
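
From here on, arguments whose parameter became rtx_code_label * change from NULL_RTX to NULL. That is more than style: NULL_RTX is ((rtx) 0), and a base-class pointer never implicitly converts to a derived-class pointer, not even a null one, so the old spelling would no longer compile. Stand-alone illustration (stand-in types):

    struct rtx_def {};
    typedef rtx_def *rtx;
    struct rtx_insn : rtx_def {};
    struct rtx_code_label : rtx_insn {};
    #define NULL_RTX (rtx) 0

    void do_compare_sketch (rtx /*size*/, rtx_code_label * /*if_true*/) {}

    int main ()
    {
      do_compare_sketch (NULL_RTX, NULL);   /* OK: NULL fits any pointer */
      /* do_compare_sketch (NULL_RTX, NULL_RTX);
            error: no rtx -> rtx_code_label * conversion  */
    }

The same rule drives the return NULL_RTX to return NULL changes in block_label and maybe_gen_insn further down.
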
@@ -5938,9 +5938,9 @@ expand_builtin_acc_on_device (tree exp, rtx target)
emit_move_insn (target, const1_rtx);
rtx_code_label *done_label = gen_label_rtx ();
do_compare_rtx_and_jump (v, v1, EQ, false, v_mode, NULL_RTX,
- NULL_RTX, done_label, PROB_EVEN);
+ NULL, done_label, PROB_EVEN);
do_compare_rtx_and_jump (v, v2, EQ, false, v_mode, NULL_RTX,
- NULL_RTX, done_label, PROB_EVEN);
+ NULL, done_label, PROB_EVEN);
emit_move_insn (target, const0_rtx);
emit_label (done_label);
@@ -190,7 +190,8 @@ try_simplify_condjump (basic_block cbranch_block)
return false;
/* Invert the conditional branch. */
- if (!invert_jump (cbranch_insn, block_label (jump_dest_block), 0))
+ if (!invert_jump (as_a <rtx_jump_insn *> (cbranch_insn),
+ block_label (jump_dest_block), 0))
return false;
if (dump_file)
@@ -2051,7 +2051,7 @@ static hash_map<basic_block, rtx_code_label *> *lab_rtx_for_bb;
/* Returns the label_rtx expression for a label starting basic block BB. */
-static rtx
+static rtx_code_label *
label_rtx_for_bb (basic_block bb ATTRIBUTE_UNUSED)
{
gimple_stmt_iterator gsi;
@@ -2078,7 +2078,7 @@ label_rtx_for_bb (basic_block bb ATTRIBUTE_UNUSED)
if (DECL_NONLOCAL (lab))
break;
- return label_rtx (lab);
+ return jump_target_rtx (lab);
}
rtx_code_label *l = gen_label_rtx ();
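
label_rtx hands back a bare rtx; jump_target_rtx, used here and again in the expand_goto, construct_init_block and sjlj_emit_dispatch_table hunks, is the typed variant for labels that are about to be used as jump targets. A hedged sketch of the assumed relationship (stand-in types; the real helper presumably wraps label_rtx in a checked cast):

    #include <cassert>

    struct rtx_def { bool is_code_label; };
    typedef rtx_def *rtx;
    struct rtx_insn : rtx_def {};
    struct rtx_code_label : rtx_insn {};
    struct tree_node {}; typedef tree_node *tree;

    rtx label_rtx (tree)                     /* legacy accessor: untyped */
    {
      static rtx_code_label lab;
      lab.is_code_label = true;
      return &lab;
    }

    rtx_code_label *
    jump_target_rtx (tree t)                 /* typed variant (assumed) */
    {
      rtx r = label_rtx (t);
      assert (r->is_code_label);             /* what as_a <> would check */
      return static_cast<rtx_code_label *> (r);
    }

    int main ()
    {
      tree label_decl = nullptr;             /* placeholder LABEL_DECL */
      return jump_target_rtx (label_decl) != nullptr ? 0 : 1;
    }
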
@@ -3120,7 +3120,7 @@ expand_goto (tree label)
gcc_assert (!context || context == current_function_decl);
#endif
- emit_jump (label_rtx (label));
+ emit_jump (jump_target_rtx (label));
}
/* Output a return with no value. */
@@ -5579,7 +5579,7 @@ construct_init_block (void)
{
tree label = gimple_block_label (e->dest);
- emit_jump (label_rtx (label));
+ emit_jump (jump_target_rtx (label));
flags = 0;
}
else
@@ -999,18 +999,18 @@ rtl_can_merge_blocks (basic_block a, basic_block b)
/* Return the label in the head of basic block BLOCK. Create one if it doesn't
exist. */
-rtx
+rtx_code_label *
block_label (basic_block block)
{
if (block == EXIT_BLOCK_PTR_FOR_FN (cfun))
- return NULL_RTX;
+ return NULL;
if (!LABEL_P (BB_HEAD (block)))
{
BB_HEAD (block) = emit_label_before (gen_label_rtx (), BB_HEAD (block));
}
- return BB_HEAD (block);
+ return as_a <rtx_code_label *> (BB_HEAD (block));
}
/* Attempt to perform edge redirection by replacing possibly complex jump
@@ -1110,7 +1110,8 @@ try_redirect_by_replacing_jump (edge e, basic_block target, bool in_cfglayout)
if (dump_file)
fprintf (dump_file, "Redirecting jump %i from %i to %i.\n",
INSN_UID (insn), e->dest->index, target->index);
- if (!redirect_jump (insn, block_label (target), 0))
+ if (!redirect_jump (as_a <rtx_jump_insn *> (insn),
+ block_label (target), 0))
{
gcc_assert (target == EXIT_BLOCK_PTR_FOR_FN (cfun));
return NULL;
@@ -1294,7 +1295,8 @@ patch_jump_insn (rtx_insn *insn, rtx_insn *old_label, basic_block new_bb)
/* If the substitution doesn't succeed, die. This can happen
if the back end emitted unrecognizable instructions or if
target is exit block on some arches. */
- if (!redirect_jump (insn, block_label (new_bb), 0))
+ if (!redirect_jump (as_a <rtx_jump_insn *> (insn),
+ block_label (new_bb), 0))
{
gcc_assert (new_bb == EXIT_BLOCK_PTR_FOR_FN (cfun));
return false;
@@ -1322,7 +1324,7 @@ redirect_branch_edge (edge e, basic_block target)
if (!currently_expanding_to_rtl)
{
- if (!patch_jump_insn (insn, old_label, target))
+ if (!patch_jump_insn (as_a <rtx_jump_insn *> (insn), old_label, target))
return NULL;
}
else
@@ -1330,7 +1332,8 @@ redirect_branch_edge (edge e, basic_block target)
jumps (i.e. not yet split by find_many_sub_basic_blocks).
Redirect all of those that match our label. */
FOR_BB_INSNS (src, insn)
- if (JUMP_P (insn) && !patch_jump_insn (insn, old_label, target))
+ if (JUMP_P (insn) && !patch_jump_insn (as_a <rtx_jump_insn *> (insn),
+ old_label, target))
return NULL;
if (dump_file)
@@ -1521,7 +1524,8 @@ force_nonfallthru_and_redirect (edge e, basic_block target, rtx jump_label)
edge b = unchecked_make_edge (e->src, target, 0);
bool redirected;
- redirected = redirect_jump (BB_END (e->src), block_label (target), 0);
+ redirected = redirect_jump (as_a <rtx_jump_insn *> (BB_END (e->src)),
+ block_label (target), 0);
gcc_assert (redirected);
note = find_reg_note (BB_END (e->src), REG_BR_PROB, NULL_RTX);
@@ -3777,10 +3781,10 @@ fixup_reorder_chain (void)
e_taken = e;
bb_end_insn = BB_END (bb);
- if (JUMP_P (bb_end_insn))
+ if (rtx_jump_insn *bb_end_jump = dyn_cast <rtx_jump_insn *> (bb_end_insn))
{
- ret_label = JUMP_LABEL (bb_end_insn);
- if (any_condjump_p (bb_end_insn))
+ ret_label = JUMP_LABEL (bb_end_jump);
+ if (any_condjump_p (bb_end_jump))
{
/* This might happen if the conditional jump has side
effects and could therefore not be optimized away.
@@ -3788,10 +3792,10 @@ fixup_reorder_chain (void)
to prevent rtl_verify_flow_info from complaining. */
if (!e_fall)
{
- gcc_assert (!onlyjump_p (bb_end_insn)
- || returnjump_p (bb_end_insn)
+ gcc_assert (!onlyjump_p (bb_end_jump)
+ || returnjump_p (bb_end_jump)
|| (e_taken->flags & EDGE_CROSSING));
- emit_barrier_after (bb_end_insn);
+ emit_barrier_after (bb_end_jump);
continue;
}
@@ -3813,11 +3817,11 @@ fixup_reorder_chain (void)
edge based on known or assumed probability. */
else if (bb->aux != e_taken->dest)
{
- rtx note = find_reg_note (bb_end_insn, REG_BR_PROB, 0);
+ rtx note = find_reg_note (bb_end_jump, REG_BR_PROB, 0);
if (note
&& XINT (note, 0) < REG_BR_PROB_BASE / 2
- && invert_jump (bb_end_insn,
+ && invert_jump (bb_end_jump,
(e_fall->dest
== EXIT_BLOCK_PTR_FOR_FN (cfun)
? NULL_RTX
@@ -3840,7 +3844,7 @@ fixup_reorder_chain (void)
/* Otherwise we can try to invert the jump. This will
basically never fail, however, keep up the pretense. */
- else if (invert_jump (bb_end_insn,
+ else if (invert_jump (bb_end_jump,
(e_fall->dest
== EXIT_BLOCK_PTR_FOR_FN (cfun)
? NULL_RTX
@@ -4961,7 +4965,7 @@ rtl_lv_add_condition_to_bb (basic_block first_head ,
basic_block second_head ATTRIBUTE_UNUSED,
basic_block cond_bb, void *comp_rtx)
{
- rtx label;
+ rtx_code_label *label;
rtx_insn *seq, *jump;
rtx op0 = XEXP ((rtx)comp_rtx, 0);
rtx op1 = XEXP ((rtx)comp_rtx, 1);
@@ -4977,8 +4981,7 @@ rtl_lv_add_condition_to_bb (basic_block first_head ,
start_sequence ();
op0 = force_operand (op0, NULL_RTX);
op1 = force_operand (op1, NULL_RTX);
- do_compare_rtx_and_jump (op0, op1, comp, 0,
- mode, NULL_RTX, NULL_RTX, label, -1);
+ do_compare_rtx_and_jump (op0, op1, comp, 0, mode, NULL_RTX, NULL, label, -1);
jump = get_last_insn ();
JUMP_LABEL (jump) = label;
LABEL_NUSES (label)++;
@@ -33,7 +33,7 @@ extern bool contains_no_active_insn_p (const_basic_block);
extern bool forwarder_block_p (const_basic_block);
extern bool can_fallthru (basic_block, basic_block);
extern rtx_note *bb_note (basic_block);
-extern rtx block_label (basic_block);
+extern rtx_code_label *block_label (basic_block);
extern edge try_redirect_by_replacing_jump (edge, basic_block, bool);
extern void emit_barrier_after_bb (basic_block bb);
extern basic_block force_nonfallthru_and_redirect (edge, basic_block, rtx);
@@ -38390,7 +38390,7 @@ ix86_emit_cmove (rtx dst, rtx src, enum rtx_code code, rtx op1, rtx op2)
}
else
{
- rtx nomove = gen_label_rtx ();
+ rtx_code_label *nomove = gen_label_rtx ();
emit_cmp_and_jump_insns (op1, op2, reverse_condition (code),
const0_rtx, GET_MODE (op1), 1, nomove);
emit_move_insn (dst, src);
@@ -61,10 +61,12 @@ along with GCC; see the file COPYING3. If not see
#include "tm_p.h"
static bool prefer_and_bit_test (machine_mode, int);
-static void do_jump_by_parts_greater (tree, tree, int, rtx, rtx, int);
-static void do_jump_by_parts_equality (tree, tree, rtx, rtx, int);
-static void do_compare_and_jump (tree, tree, enum rtx_code, enum rtx_code, rtx,
- rtx, int);
+static void do_jump_by_parts_greater (tree, tree, int,
+ rtx_code_label *, rtx_code_label *, int);
+static void do_jump_by_parts_equality (tree, tree, rtx_code_label *,
+ rtx_code_label *, int);
+static void do_compare_and_jump (tree, tree, enum rtx_code, enum rtx_code,
+ rtx_code_label *, rtx_code_label *, int);
/* Invert probability if there is any. -1 stands for unknown. */
@@ -146,34 +148,34 @@ restore_pending_stack_adjust (saved_pending_stack_adjust *save)
/* Expand conditional expressions. */
-/* Generate code to evaluate EXP and jump to LABEL if the value is zero.
- LABEL is an rtx of code CODE_LABEL, in this function and all the
- functions here. */
+/* Generate code to evaluate EXP and jump to LABEL if the value is zero. */
void
-jumpifnot (tree exp, rtx label, int prob)
+jumpifnot (tree exp, rtx_code_label *label, int prob)
{
- do_jump (exp, label, NULL_RTX, inv (prob));
+ do_jump (exp, label, NULL, inv (prob));
}
void
-jumpifnot_1 (enum tree_code code, tree op0, tree op1, rtx label, int prob)
+jumpifnot_1 (enum tree_code code, tree op0, tree op1, rtx_code_label *label,
+ int prob)
{
- do_jump_1 (code, op0, op1, label, NULL_RTX, inv (prob));
+ do_jump_1 (code, op0, op1, label, NULL, inv (prob));
}
/* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
void
-jumpif (tree exp, rtx label, int prob)
+jumpif (tree exp, rtx_code_label *label, int prob)
{
- do_jump (exp, NULL_RTX, label, prob);
+ do_jump (exp, NULL, label, prob);
}
void
-jumpif_1 (enum tree_code code, tree op0, tree op1, rtx label, int prob)
+jumpif_1 (enum tree_code code, tree op0, tree op1,
+ rtx_code_label *label, int prob)
{
- do_jump_1 (code, op0, op1, NULL_RTX, label, prob);
+ do_jump_1 (code, op0, op1, NULL, label, prob);
}
/* Used internally by prefer_and_bit_test. */
@@ -225,7 +227,8 @@ prefer_and_bit_test (machine_mode mode, int bitnum)
void
do_jump_1 (enum tree_code code, tree op0, tree op1,
- rtx if_false_label, rtx if_true_label, int prob)
+ rtx_code_label *if_false_label, rtx_code_label *if_true_label,
+ int prob)
{
machine_mode mode;
rtx_code_label *drop_through_label = 0;
@@ -378,15 +381,15 @@ do_jump_1 (enum tree_code code, tree op0, tree op1,
op0_prob = inv (op0_false_prob);
op1_prob = inv (op1_false_prob);
}
- if (if_false_label == NULL_RTX)
+ if (if_false_label == NULL)
{
drop_through_label = gen_label_rtx ();
- do_jump (op0, drop_through_label, NULL_RTX, op0_prob);
- do_jump (op1, NULL_RTX, if_true_label, op1_prob);
+ do_jump (op0, drop_through_label, NULL, op0_prob);
+ do_jump (op1, NULL, if_true_label, op1_prob);
}
else
{
- do_jump (op0, if_false_label, NULL_RTX, op0_prob);
+ do_jump (op0, if_false_label, NULL, op0_prob);
do_jump (op1, if_false_label, if_true_label, op1_prob);
}
break;
@@ -405,18 +408,18 @@ do_jump_1 (enum tree_code code, tree op0, tree op1,
{
op0_prob = prob / 2;
op1_prob = GCOV_COMPUTE_SCALE ((prob / 2), inv (op0_prob));
- }
- if (if_true_label == NULL_RTX)
- {
- drop_through_label = gen_label_rtx ();
- do_jump (op0, NULL_RTX, drop_through_label, op0_prob);
- do_jump (op1, if_false_label, NULL_RTX, op1_prob);
- }
- else
- {
- do_jump (op0, NULL_RTX, if_true_label, op0_prob);
- do_jump (op1, if_false_label, if_true_label, op1_prob);
- }
+ }
+ if (if_true_label == NULL)
+ {
+ drop_through_label = gen_label_rtx ();
+ do_jump (op0, NULL, drop_through_label, op0_prob);
+ do_jump (op1, if_false_label, NULL, op1_prob);
+ }
+ else
+ {
+ do_jump (op0, NULL, if_true_label, op0_prob);
+ do_jump (op1, if_false_label, if_true_label, op1_prob);
+ }
break;
}
@@ -443,14 +446,15 @@ do_jump_1 (enum tree_code code, tree op0, tree op1,
PROB is probability of jump to if_true_label, or -1 if unknown. */
void
-do_jump (tree exp, rtx if_false_label, rtx if_true_label, int prob)
+do_jump (tree exp, rtx_code_label *if_false_label,
+ rtx_code_label *if_true_label, int prob)
{
enum tree_code code = TREE_CODE (exp);
rtx temp;
int i;
tree type;
machine_mode mode;
- rtx_code_label *drop_through_label = 0;
+ rtx_code_label *drop_through_label = NULL;
switch (code)
{
@@ -458,10 +462,13 @@ do_jump (tree exp, rtx if_false_label, rtx if_true_label, int prob)
break;
case INTEGER_CST:
- temp = integer_zerop (exp) ? if_false_label : if_true_label;
- if (temp)
- emit_jump (temp);
- break;
+ {
+ rtx_code_label *lab = integer_zerop (exp) ? if_false_label
+ : if_true_label;
+ if (lab)
+ emit_jump (lab);
+ break;
+ }
#if 0
/* This is not true with #pragma weak */
@@ -511,7 +518,7 @@ do_jump (tree exp, rtx if_false_label, rtx if_true_label, int prob)
}
do_pending_stack_adjust ();
- do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX, -1);
+ do_jump (TREE_OPERAND (exp, 0), label1, NULL, -1);
do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label, prob);
emit_label (label1);
do_jump (TREE_OPERAND (exp, 2), if_false_label, if_true_label, prob);
@@ -555,7 +562,7 @@ do_jump (tree exp, rtx if_false_label, rtx if_true_label, int prob)
if (integer_onep (TREE_OPERAND (exp, 1)))
{
tree exp0 = TREE_OPERAND (exp, 0);
- rtx set_label, clr_label;
+ rtx_code_label *set_label, *clr_label;
int setclr_prob = prob;
/* Strip narrowing integral type conversions. */
@@ -684,11 +691,12 @@ do_jump (tree exp, rtx if_false_label, rtx if_true_label, int prob)
static void
do_jump_by_parts_greater_rtx (machine_mode mode, int unsignedp, rtx op0,
- rtx op1, rtx if_false_label, rtx if_true_label,
+ rtx op1, rtx_code_label *if_false_label,
+ rtx_code_label *if_true_label,
int prob)
{
int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
- rtx drop_through_label = 0;
+ rtx_code_label *drop_through_label = 0;
bool drop_through_if_true = false, drop_through_if_false = false;
enum rtx_code code = GT;
int i;
@@ -735,7 +743,7 @@ do_jump_by_parts_greater_rtx (machine_mode mode, int unsignedp, rtx op0,
/* All but high-order word must be compared as unsigned. */
do_compare_rtx_and_jump (op0_word, op1_word, code, (unsignedp || i > 0),
- word_mode, NULL_RTX, NULL_RTX, if_true_label,
+ word_mode, NULL_RTX, NULL, if_true_label,
prob);
/* Emit only one comparison for 0. Do not emit the last cond jump. */
@@ -744,7 +752,7 @@ do_jump_by_parts_greater_rtx (machine_mode mode, int unsignedp, rtx op0,
/* Consider lower words only if these are equal. */
do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
- NULL_RTX, NULL_RTX, if_false_label, inv (prob));
+ NULL_RTX, NULL, if_false_label, inv (prob));
}
if (!drop_through_if_false)
@@ -760,7 +768,8 @@ do_jump_by_parts_greater_rtx (machine_mode mode, int unsignedp, rtx op0,
static void
do_jump_by_parts_greater (tree treeop0, tree treeop1, int swap,
- rtx if_false_label, rtx if_true_label, int prob)
+ rtx_code_label *if_false_label,
+ rtx_code_label *if_true_label, int prob)
{
rtx op0 = expand_normal (swap ? treeop1 : treeop0);
rtx op1 = expand_normal (swap ? treeop0 : treeop1);
@@ -773,17 +782,18 @@ do_jump_by_parts_greater (tree treeop0, tree treeop1, int swap,
/* Jump according to whether OP0 is 0. We assume that OP0 has an integer
mode, MODE, that is too wide for the available compare insns.
- Either (but not both) of IF_TRUE_LABEL and IF_FALSE_LABEL may be NULL_RTX
+ Either (but not both) of IF_TRUE_LABEL and IF_FALSE_LABEL may be NULL
to indicate drop through. */
static void
do_jump_by_parts_zero_rtx (machine_mode mode, rtx op0,
- rtx if_false_label, rtx if_true_label, int prob)
+ rtx_code_label *if_false_label,
+ rtx_code_label *if_true_label, int prob)
{
int nwords = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
rtx part;
int i;
- rtx drop_through_label = 0;
+ rtx_code_label *drop_through_label = NULL;
/* The fastest way of doing this comparison on almost any machine is to
"or" all the words and compare the result. If all have to be loaded
@@ -806,12 +816,12 @@ do_jump_by_parts_zero_rtx (machine_mode mode, rtx op0,
/* If we couldn't do the "or" simply, do this with a series of compares. */
if (! if_false_label)
- drop_through_label = if_false_label = gen_label_rtx ();
+ if_false_label = drop_through_label = gen_label_rtx ();
for (i = 0; i < nwords; i++)
do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
const0_rtx, EQ, 1, word_mode, NULL_RTX,
- if_false_label, NULL_RTX, prob);
+ if_false_label, NULL, prob);
if (if_true_label)
emit_jump (if_true_label);
@@ -827,10 +837,11 @@ do_jump_by_parts_zero_rtx (machine_mode mode, rtx op0,
static void
do_jump_by_parts_equality_rtx (machine_mode mode, rtx op0, rtx op1,
- rtx if_false_label, rtx if_true_label, int prob)
+ rtx_code_label *if_false_label,
+ rtx_code_label *if_true_label, int prob)
{
int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
- rtx drop_through_label = 0;
+ rtx_code_label *drop_through_label = NULL;
int i;
if (op1 == const0_rtx)
@@ -853,7 +864,7 @@ do_jump_by_parts_equality_rtx (machine_mode mode, rtx op0, rtx op1,
do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
operand_subword_force (op1, i, mode),
EQ, 0, word_mode, NULL_RTX,
- if_false_label, NULL_RTX, prob);
+ if_false_label, NULL, prob);
if (if_true_label)
emit_jump (if_true_label);
@@ -865,8 +876,9 @@ do_jump_by_parts_equality_rtx (machine_mode mode, rtx op0, rtx op1,
with one insn, test the comparison and jump to the appropriate label. */
static void
-do_jump_by_parts_equality (tree treeop0, tree treeop1, rtx if_false_label,
- rtx if_true_label, int prob)
+do_jump_by_parts_equality (tree treeop0, tree treeop1,
+ rtx_code_label *if_false_label,
+ rtx_code_label *if_true_label, int prob)
{
rtx op0 = expand_normal (treeop0);
rtx op1 = expand_normal (treeop1);
@@ -961,11 +973,12 @@ split_comparison (enum rtx_code code, machine_mode mode,
void
do_compare_rtx_and_jump (rtx op0, rtx op1, enum rtx_code code, int unsignedp,
- machine_mode mode, rtx size, rtx if_false_label,
- rtx if_true_label, int prob)
+ machine_mode mode, rtx size,
+ rtx_code_label *if_false_label,
+ rtx_code_label *if_true_label, int prob)
{
rtx tem;
- rtx dummy_label = NULL;
+ rtx_code_label *dummy_label = NULL;
/* Reverse the comparison if that is safe and we want to jump if it is
false. Also convert to the reverse comparison if the target can
@@ -987,9 +1000,7 @@ do_compare_rtx_and_jump (rtx op0, rtx op1, enum rtx_code code, int unsignedp,
if (can_compare_p (rcode, mode, ccp_jump)
|| (code == ORDERED && ! can_compare_p (ORDERED, mode, ccp_jump)))
{
- tem = if_true_label;
- if_true_label = if_false_label;
- if_false_label = tem;
+ std::swap (if_true_label, if_false_label);
code = rcode;
prob = inv (prob);
}
@@ -1000,9 +1011,7 @@ do_compare_rtx_and_jump (rtx op0, rtx op1, enum rtx_code code, int unsignedp,
if (swap_commutative_operands_p (op0, op1))
{
- tem = op0;
- op0 = op1;
- op1 = tem;
+ std::swap (op0, op1);
code = swap_condition (code);
}
@@ -1014,8 +1023,9 @@ do_compare_rtx_and_jump (rtx op0, rtx op1, enum rtx_code code, int unsignedp,
{
if (CONSTANT_P (tem))
{
- rtx label = (tem == const0_rtx || tem == CONST0_RTX (mode))
- ? if_false_label : if_true_label;
+ rtx_code_label *label = (tem == const0_rtx
+ || tem == CONST0_RTX (mode))
+ ? if_false_label : if_true_label;
if (label)
emit_jump (label);
return;
@@ -1134,7 +1144,7 @@ do_compare_rtx_and_jump (rtx op0, rtx op1, enum rtx_code code, int unsignedp,
first_prob = REG_BR_PROB_BASE - REG_BR_PROB_BASE / 100;
if (and_them)
{
- rtx dest_label;
+ rtx_code_label *dest_label;
/* If we only jump if true, just bypass the second jump. */
if (! if_false_label)
{
@@ -1145,13 +1155,11 @@ do_compare_rtx_and_jump (rtx op0, rtx op1, enum rtx_code code, int unsignedp,
else
dest_label = if_false_label;
do_compare_rtx_and_jump (op0, op1, first_code, unsignedp, mode,
- size, dest_label, NULL_RTX,
- first_prob);
+ size, dest_label, NULL, first_prob);
}
else
do_compare_rtx_and_jump (op0, op1, first_code, unsignedp, mode,
- size, NULL_RTX, if_true_label,
- first_prob);
+ size, NULL, if_true_label, first_prob);
}
}
@@ -1177,8 +1185,9 @@ do_compare_rtx_and_jump (rtx op0, rtx op1, enum rtx_code code, int unsignedp,
static void
do_compare_and_jump (tree treeop0, tree treeop1, enum rtx_code signed_code,
- enum rtx_code unsigned_code, rtx if_false_label,
- rtx if_true_label, int prob)
+ enum rtx_code unsigned_code,
+ rtx_code_label *if_false_label,
+ rtx_code_label *if_true_label, int prob)
{
rtx op0, op1;
tree type;
@@ -57,20 +57,23 @@ extern void save_pending_stack_adjust (saved_pending_stack_adjust *);
extern void restore_pending_stack_adjust (saved_pending_stack_adjust *);
/* Generate code to evaluate EXP and jump to LABEL if the value is zero. */
-extern void jumpifnot (tree, rtx, int);
-extern void jumpifnot_1 (enum tree_code, tree, tree, rtx, int);
+extern void jumpifnot (tree exp, rtx_code_label *label, int prob);
+extern void jumpifnot_1 (enum tree_code, tree, tree, rtx_code_label *, int);
/* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
-extern void jumpif (tree, rtx, int);
-extern void jumpif_1 (enum tree_code, tree, tree, rtx, int);
+extern void jumpif (tree exp, rtx_code_label *label, int prob);
+extern void jumpif_1 (enum tree_code, tree, tree, rtx_code_label *, int);
/* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
the result is zero, or IF_TRUE_LABEL if the result is one. */
-extern void do_jump (tree, rtx, rtx, int);
-extern void do_jump_1 (enum tree_code, tree, tree, rtx, rtx, int);
+extern void do_jump (tree exp, rtx_code_label *if_false_label,
+ rtx_code_label *if_true_label, int prob);
+extern void do_jump_1 (enum tree_code, tree, tree, rtx_code_label *,
+ rtx_code_label *, int);
extern void do_compare_rtx_and_jump (rtx, rtx, enum rtx_code, int,
- machine_mode, rtx, rtx, rtx, int);
+ machine_mode, rtx, rtx_code_label *,
+ rtx_code_label *, int);
extern bool split_comparison (enum rtx_code, machine_mode,
enum rtx_code *, enum rtx_code *);
@@ -907,7 +907,7 @@ emit_inc_dec_insn_before (rtx mem ATTRIBUTE_UNUSED,
end_sequence ();
}
else
- new_insn = as_a <rtx_insn *> (gen_move_insn (dest, src));
+ new_insn = gen_move_insn (dest, src);
info.first = new_insn;
info.fixed_regs_live = insn_info->fixed_regs_live;
info.failure = false;
@@ -4441,13 +4441,15 @@ emit_barrier_before (rtx before)
/* Emit the label LABEL before the insn BEFORE. */
-rtx_insn *
-emit_label_before (rtx label, rtx_insn *before)
+rtx_code_label *
+emit_label_before (rtx uncast_label, rtx_insn *before)
{
+ rtx_code_label *label = as_a <rtx_code_label *> (uncast_label);
+
gcc_checking_assert (INSN_UID (label) == 0);
INSN_UID (label) = cur_insn_uid++;
add_insn_before (label, before, NULL);
- return as_a <rtx_insn *> (label);
+ return label;
}
/* Helper for emit_insn_after, handles lists of instructions
@@ -5068,13 +5070,15 @@ emit_call_insn (rtx x)
/* Add the label LABEL to the end of the doubly-linked list. */
-rtx_insn *
-emit_label (rtx label)
+rtx_code_label *
+emit_label (rtx uncast_label)
{
+ rtx_code_label *label = as_a <rtx_code_label *> (uncast_label);
+
gcc_checking_assert (INSN_UID (label) == 0);
INSN_UID (label) = cur_insn_uid++;
- add_insn (as_a <rtx_insn *> (label));
- return as_a <rtx_insn *> (label);
+ add_insn (label);
+ return label;
}
/* Make an insn of code JUMP_TABLE_DATA
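
emit_label_before and emit_label use a transition idiom that recurs in this series: keep the parameter as plain rtx so not-yet-converted callers still compile, checked-cast once on entry into an uncast_label local, and return the strong type. Sketch of the shape (stand-in types; gcc_checking_assert approximated with assert):

    #include <cassert>

    struct rtx_def { int code; };
    typedef rtx_def *rtx;
    struct rtx_insn : rtx_def {};
    struct rtx_code_label : rtx_insn {};
    const int CODE_LABEL = 2;

    rtx_code_label *
    emit_label_sketch (rtx uncast_label)
    {
      /* One cast at the boundary; the body and return type stay typed.  */
      assert (uncast_label->code == CODE_LABEL);
      rtx_code_label *label = static_cast<rtx_code_label *> (uncast_label);
      return label;
    }

    int main ()
    {
      rtx_code_label lab; lab.code = CODE_LABEL;
      rtx untyped = &lab;                    /* an unconverted caller's view */
      return emit_label_sketch (untyped) == &lab ? 0 : 1;
    }
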
@@ -5335,7 +5339,7 @@ emit (rtx x)
switch (code)
{
case CODE_LABEL:
- return emit_label (x);
+ return emit_label (as_a <rtx_code_label *> (x));
case INSN:
return emit_insn (x);
case JUMP_INSN:
@@ -1349,7 +1349,7 @@ sjlj_emit_dispatch_table (rtx_code_label *dispatch_label, int num_dispatch)
if (lp && lp->post_landing_pad)
{
rtx_insn *seq2;
- rtx label;
+ rtx_code_label *label;
start_sequence ();
@@ -1363,7 +1363,7 @@ sjlj_emit_dispatch_table (rtx_code_label *dispatch_label, int num_dispatch)
t = build_int_cst (integer_type_node, disp_index);
case_elt = build_case_label (t, NULL, t_label);
dispatch_labels.quick_push (case_elt);
- label = label_rtx (t_label);
+ label = jump_target_rtx (t_label);
}
else
label = gen_label_rtx ();
@@ -984,7 +984,7 @@ emit_stack_save (enum save_level save_level, rtx *psave)
{
rtx sa = *psave;
/* The default is that we use a move insn and save in a Pmode object. */
- rtx (*fcn) (rtx, rtx) = gen_move_insn;
+ rtx_insn * (*fcn) (rtx, rtx) = gen_move_insn;
machine_mode mode = STACK_SAVEAREA_MODE (save_level);
/* See if this machine has anything special to do for this kind of save. */
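
The fcn declarations in emit_stack_save and emit_stack_restore have to change in lockstep with gen_move_insn's new return type: function pointers get no return-type covariance, so a pointer declared as returning rtx stops matching a function that now returns rtx_insn *. Illustration (stand-in types):

    struct rtx_def {};
    typedef rtx_def *rtx;
    struct rtx_insn : rtx_def {};

    rtx_insn *gen_move_sketch (rtx, rtx) { return nullptr; }

    int main ()
    {
      rtx_insn *(*fcn) (rtx, rtx) = gen_move_sketch;  /* exact match: OK */
      /* rtx (*old_fcn) (rtx, rtx) = gen_move_sketch;
            error: pointer types differ, even though rtx_insn *
            converts to rtx at a call  */
      (void) fcn;
    }
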
@@ -1039,7 +1039,7 @@ void
emit_stack_restore (enum save_level save_level, rtx sa)
{
/* The default is that we use a move insn. */
- rtx (*fcn) (rtx, rtx) = gen_move_insn;
+ rtx_insn * (*fcn) (rtx, rtx) = gen_move_insn;
/* If stack_realign_drap, the x86 backend emits a prologue that aligns both
STACK_POINTER and HARD_FRAME_POINTER.
@@ -5807,8 +5807,8 @@ emit_store_flag_force (rtx target, enum rtx_code code, rtx op0, rtx op1,
&& op1 == const0_rtx)
{
label = gen_label_rtx ();
- do_compare_rtx_and_jump (target, const0_rtx, EQ, unsignedp,
- mode, NULL_RTX, NULL_RTX, label, -1);
+ do_compare_rtx_and_jump (target, const0_rtx, EQ, unsignedp, mode,
+ NULL_RTX, NULL, label, -1);
emit_move_insn (target, trueval);
emit_label (label);
return target;
@@ -5845,8 +5845,8 @@ emit_store_flag_force (rtx target, enum rtx_code code, rtx op0, rtx op1,
emit_move_insn (target, trueval);
label = gen_label_rtx ();
- do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode, NULL_RTX,
- NULL_RTX, label, -1);
+ do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode, NULL_RTX, NULL,
+ label, -1);
emit_move_insn (target, falseval);
emit_label (label);
@@ -5863,6 +5863,6 @@ do_cmp_and_jump (rtx arg1, rtx arg2, enum rtx_code op, machine_mode mode,
rtx_code_label *label)
{
int unsignedp = (op == LTU || op == LEU || op == GTU || op == GEU);
- do_compare_rtx_and_jump (arg1, arg2, op, unsignedp, mode,
- NULL_RTX, NULL_RTX, label, -1);
+ do_compare_rtx_and_jump (arg1, arg2, op, unsignedp, mode, NULL_RTX,
+ NULL, label, -1);
}
@@ -3652,7 +3652,7 @@ emit_move_insn (rtx x, rtx y)
/* Generate the body of an instruction to copy Y into X.
It may be a list of insns, if one insn isn't enough. */
-rtx
+rtx_insn *
gen_move_insn (rtx x, rtx y)
{
rtx_insn *seq;
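
This definition is what the gen_move_insn call-site cleanups hang off, both above (move_btr_def, emit_inc_dec_insn_before) and below (emit_spill_move, schedule_reg_moves, eliminate_partially_redundant_load): narrowing the producer's return type once deletes an as_a <rtx_insn *> at every consumer. The shape, as a sketch:

    struct rtx_def {};
    typedef rtx_def *rtx;
    struct rtx_insn : rtx_def {};

    rtx_insn *make_move () { return new rtx_insn; }

    /* Before: returned rtx, so callers wrote
         new_insn = as_a <rtx_insn *> (gen_move_insn (x, y));
       After: the fact that the result is always an insn (or a list of
       insns) is stated once, in the signature.  */
    rtx_insn *
    gen_move_insn_sketch (rtx, rtx)
    {
      return make_move ();
    }

    int main ()
    {
      rtx_insn *new_insn = gen_move_insn_sketch (nullptr, nullptr);
      delete new_insn;
    }
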
@@ -8128,6 +8128,7 @@ expand_expr_real_2 (sepops ops, rtx target, machine_mode tmode,
enum expand_modifier modifier)
{
rtx op0, op1, op2, temp;
+ rtx_code_label *lab;
tree type;
int unsignedp;
machine_mode mode;
@@ -8870,11 +8871,7 @@ expand_expr_real_2 (sepops ops, rtx target, machine_mode tmode,
/* If op1 was placed in target, swap op0 and op1. */
if (target != op0 && target == op1)
- {
- temp = op0;
- op0 = op1;
- op1 = temp;
- }
+ std::swap (op0, op1);
/* We generate better code and avoid problems with op1 mentioning
target by forcing op1 into a pseudo if it isn't a constant. */
@@ -8941,13 +8938,13 @@ expand_expr_real_2 (sepops ops, rtx target, machine_mode tmode,
if (target != op0)
emit_move_insn (target, op0);
- temp = gen_label_rtx ();
+ lab = gen_label_rtx ();
do_compare_rtx_and_jump (target, cmpop1, comparison_code,
- unsignedp, mode, NULL_RTX, NULL_RTX, temp,
+ unsignedp, mode, NULL_RTX, NULL, lab,
-1);
}
emit_move_insn (target, op1);
- emit_label (temp);
+ emit_label (lab);
return target;
case BIT_NOT_EXPR:
@@ -9025,38 +9022,39 @@ expand_expr_real_2 (sepops ops, rtx target, machine_mode tmode,
case UNGE_EXPR:
case UNEQ_EXPR:
case LTGT_EXPR:
- temp = do_store_flag (ops,
- modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
- tmode != VOIDmode ? tmode : mode);
- if (temp)
- return temp;
-
- /* Use a compare and a jump for BLKmode comparisons, or for function
- type comparisons is HAVE_canonicalize_funcptr_for_compare. */
-
- if ((target == 0
- || modifier == EXPAND_STACK_PARM
- || ! safe_from_p (target, treeop0, 1)
- || ! safe_from_p (target, treeop1, 1)
- /* Make sure we don't have a hard reg (such as function's return
- value) live across basic blocks, if not optimizing. */
- || (!optimize && REG_P (target)
- && REGNO (target) < FIRST_PSEUDO_REGISTER)))
- target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
+ {
+ temp = do_store_flag (ops,
+ modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
+ tmode != VOIDmode ? tmode : mode);
+ if (temp)
+ return temp;
- emit_move_insn (target, const0_rtx);
+ /* Use a compare and a jump for BLKmode comparisons, or for function
+ type comparisons if HAVE_canonicalize_funcptr_for_compare. */
+
+ if ((target == 0
+ || modifier == EXPAND_STACK_PARM
+ || ! safe_from_p (target, treeop0, 1)
+ || ! safe_from_p (target, treeop1, 1)
+ /* Make sure we don't have a hard reg (such as function's return
+ value) live across basic blocks, if not optimizing. */
+ || (!optimize && REG_P (target)
+ && REGNO (target) < FIRST_PSEUDO_REGISTER)))
+ target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
- op1 = gen_label_rtx ();
- jumpifnot_1 (code, treeop0, treeop1, op1, -1);
+ emit_move_insn (target, const0_rtx);
- if (TYPE_PRECISION (type) == 1 && !TYPE_UNSIGNED (type))
- emit_move_insn (target, constm1_rtx);
- else
- emit_move_insn (target, const1_rtx);
+ rtx_code_label *lab1 = gen_label_rtx ();
+ jumpifnot_1 (code, treeop0, treeop1, lab1, -1);
- emit_label (op1);
- return target;
+ if (TYPE_PRECISION (type) == 1 && !TYPE_UNSIGNED (type))
+ emit_move_insn (target, constm1_rtx);
+ else
+ emit_move_insn (target, const1_rtx);
+ emit_label (lab1);
+ return target;
+ }
case COMPLEX_EXPR:
/* Get the rtx code of the operands. */
op0 = expand_normal (treeop0);
@@ -9279,58 +9277,60 @@ expand_expr_real_2 (sepops ops, rtx target, machine_mode tmode,
}
case COND_EXPR:
- /* A COND_EXPR with its type being VOID_TYPE represents a
- conditional jump and is handled in
- expand_gimple_cond_expr. */
- gcc_assert (!VOID_TYPE_P (type));
-
- /* Note that COND_EXPRs whose type is a structure or union
- are required to be constructed to contain assignments of
- a temporary variable, so that we can evaluate them here
- for side effect only. If type is void, we must do likewise. */
-
- gcc_assert (!TREE_ADDRESSABLE (type)
- && !ignore
- && TREE_TYPE (treeop1) != void_type_node
- && TREE_TYPE (treeop2) != void_type_node);
-
- temp = expand_cond_expr_using_cmove (treeop0, treeop1, treeop2);
- if (temp)
- return temp;
-
- /* If we are not to produce a result, we have no target. Otherwise,
- if a target was specified use it; it will not be used as an
- intermediate target unless it is safe. If no target, use a
- temporary. */
-
- if (modifier != EXPAND_STACK_PARM
- && original_target
- && safe_from_p (original_target, treeop0, 1)
- && GET_MODE (original_target) == mode
- && !MEM_P (original_target))
- temp = original_target;
- else
- temp = assign_temp (type, 0, 1);
-
- do_pending_stack_adjust ();
- NO_DEFER_POP;
- op0 = gen_label_rtx ();
- op1 = gen_label_rtx ();
- jumpifnot (treeop0, op0, -1);
- store_expr (treeop1, temp,
- modifier == EXPAND_STACK_PARM,
- false);
-
- emit_jump_insn (gen_jump (op1));
- emit_barrier ();
- emit_label (op0);
- store_expr (treeop2, temp,
- modifier == EXPAND_STACK_PARM,
- false);
+ {
+ /* A COND_EXPR with its type being VOID_TYPE represents a
+ conditional jump and is handled in
+ expand_gimple_cond_expr. */
+ gcc_assert (!VOID_TYPE_P (type));
+
+ /* Note that COND_EXPRs whose type is a structure or union
+ are required to be constructed to contain assignments of
+ a temporary variable, so that we can evaluate them here
+ for side effect only. If type is void, we must do likewise. */
+
+ gcc_assert (!TREE_ADDRESSABLE (type)
+ && !ignore
+ && TREE_TYPE (treeop1) != void_type_node
+ && TREE_TYPE (treeop2) != void_type_node);
+
+ temp = expand_cond_expr_using_cmove (treeop0, treeop1, treeop2);
+ if (temp)
+ return temp;
- emit_label (op1);
- OK_DEFER_POP;
- return temp;
+ /* If we are not to produce a result, we have no target. Otherwise,
+ if a target was specified use it; it will not be used as an
+ intermediate target unless it is safe. If no target, use a
+ temporary. */
+
+ if (modifier != EXPAND_STACK_PARM
+ && original_target
+ && safe_from_p (original_target, treeop0, 1)
+ && GET_MODE (original_target) == mode
+ && !MEM_P (original_target))
+ temp = original_target;
+ else
+ temp = assign_temp (type, 0, 1);
+
+ do_pending_stack_adjust ();
+ NO_DEFER_POP;
+ rtx_code_label *lab0 = gen_label_rtx ();
+ rtx_code_label *lab1 = gen_label_rtx ();
+ jumpifnot (treeop0, lab0, -1);
+ store_expr (treeop1, temp,
+ modifier == EXPAND_STACK_PARM,
+ false);
+
+ emit_jump_insn (gen_jump (lab1));
+ emit_barrier ();
+ emit_label (lab0);
+ store_expr (treeop2, temp,
+ modifier == EXPAND_STACK_PARM,
+ false);
+
+ emit_label (lab1);
+ OK_DEFER_POP;
+ return temp;
+ }
case VEC_COND_EXPR:
target = expand_vec_cond_expr (type, treeop0, treeop1, treeop2, target);
@@ -203,7 +203,7 @@ extern rtx store_by_pieces (rtx, unsigned HOST_WIDE_INT,
/* Emit insns to set X from Y. */
extern rtx_insn *emit_move_insn (rtx, rtx);
-extern rtx gen_move_insn (rtx, rtx);
+extern rtx_insn *gen_move_insn (rtx, rtx);
/* Emit insns to set X from Y, with no frills. */
extern rtx_insn *emit_move_insn_1 (rtx, rtx);
@@ -5786,7 +5786,7 @@ convert_jumps_to_returns (basic_block last_bb, bool simple_p,
dest = simple_return_rtx;
else
dest = ret_rtx;
- if (!redirect_jump (jump, dest, 0))
+ if (!redirect_jump (as_a <rtx_jump_insn *> (jump), dest, 0))
{
#ifdef HAVE_simple_return
if (simple_p)
@@ -2229,7 +2229,8 @@ pre_insert_copy_insn (struct gcse_expr *expr, rtx_insn *insn)
int regno = REGNO (reg);
int indx = expr->bitmap_index;
rtx pat = PATTERN (insn);
- rtx set, first_set, new_insn;
+ rtx set, first_set;
+ rtx_insn *new_insn;
rtx old_reg;
int i;
@@ -4444,9 +4444,10 @@ dead_or_predicable (basic_block test_bb, basic_block merge_bb,
else
new_dest_label = block_label (new_dest);
+ rtx_jump_insn *jump_insn = as_a <rtx_jump_insn *> (jump);
if (reversep
- ? ! invert_jump_1 (jump, new_dest_label)
- : ! redirect_jump_1 (jump, new_dest_label))
+ ? ! invert_jump_1 (jump_insn, new_dest_label)
+ : ! redirect_jump_1 (jump_insn, new_dest_label))
goto cancel;
}
@@ -4457,7 +4458,8 @@ dead_or_predicable (basic_block test_bb, basic_block merge_bb,
if (other_bb != new_dest)
{
- redirect_jump_2 (jump, old_dest, new_dest_label, 0, reversep);
+ redirect_jump_2 (as_a <rtx_jump_insn *> (jump), old_dest, new_dest_label,
+ 0, reversep);
redirect_edge_succ (BRANCH_EDGE (test_bb), new_dest);
if (reversep)
@@ -422,7 +422,7 @@ expand_arith_overflow_result_store (tree lhs, rtx target,
lres = convert_modes (tgtmode, mode, res, uns);
gcc_assert (GET_MODE_PRECISION (tgtmode) < GET_MODE_PRECISION (mode));
do_compare_rtx_and_jump (res, convert_modes (mode, tgtmode, lres, uns),
- EQ, true, mode, NULL_RTX, NULL_RTX, done_label,
+ EQ, true, mode, NULL_RTX, NULL, done_label,
PROB_VERY_LIKELY);
write_complex_part (target, const1_rtx, true);
emit_label (done_label);
@@ -569,7 +569,7 @@ expand_addsub_overflow (location_t loc, tree_code code, tree lhs,
: CONST_SCALAR_INT_P (op1)))
tem = op1;
do_compare_rtx_and_jump (res, tem, code == PLUS_EXPR ? GEU : LEU,
- true, mode, NULL_RTX, NULL_RTX, done_label,
+ true, mode, NULL_RTX, NULL, done_label,
PROB_VERY_LIKELY);
goto do_error_label;
}
@@ -584,7 +584,7 @@ expand_addsub_overflow (location_t loc, tree_code code, tree lhs,
rtx tem = expand_binop (mode, add_optab,
code == PLUS_EXPR ? res : op0, sgn,
NULL_RTX, false, OPTAB_LIB_WIDEN);
- do_compare_rtx_and_jump (tem, op1, GEU, true, mode, NULL_RTX, NULL_RTX,
+ do_compare_rtx_and_jump (tem, op1, GEU, true, mode, NULL_RTX, NULL,
done_label, PROB_VERY_LIKELY);
goto do_error_label;
}
@@ -627,8 +627,8 @@ expand_addsub_overflow (location_t loc, tree_code code, tree lhs,
else if (pos_neg == 3)
/* If ARG0 is not known to be always positive, check at runtime. */
do_compare_rtx_and_jump (op0, const0_rtx, LT, false, mode, NULL_RTX,
- NULL_RTX, do_error, PROB_VERY_UNLIKELY);
- do_compare_rtx_and_jump (op1, op0, LEU, true, mode, NULL_RTX, NULL_RTX,
+ NULL, do_error, PROB_VERY_UNLIKELY);
+ do_compare_rtx_and_jump (op1, op0, LEU, true, mode, NULL_RTX, NULL,
done_label, PROB_VERY_LIKELY);
goto do_error_label;
}
@@ -642,7 +642,7 @@ expand_addsub_overflow (location_t loc, tree_code code, tree lhs,
OPTAB_LIB_WIDEN);
rtx tem = expand_binop (mode, add_optab, op1, sgn, NULL_RTX, false,
OPTAB_LIB_WIDEN);
- do_compare_rtx_and_jump (op0, tem, LTU, true, mode, NULL_RTX, NULL_RTX,
+ do_compare_rtx_and_jump (op0, tem, LTU, true, mode, NULL_RTX, NULL,
done_label, PROB_VERY_LIKELY);
goto do_error_label;
}
@@ -655,7 +655,7 @@ expand_addsub_overflow (location_t loc, tree_code code, tree lhs,
res = expand_binop (mode, add_optab, op0, op1, NULL_RTX, false,
OPTAB_LIB_WIDEN);
do_compare_rtx_and_jump (res, const0_rtx, LT, false, mode, NULL_RTX,
- NULL_RTX, do_error, PROB_VERY_UNLIKELY);
+ NULL, do_error, PROB_VERY_UNLIKELY);
rtx tem = op1;
/* The operation is commutative, so we can pick operand to compare
against. For prec <= BITS_PER_WORD, I think preferring REG operand
@@ -668,7 +668,7 @@ expand_addsub_overflow (location_t loc, tree_code code, tree lhs,
? (CONST_SCALAR_INT_P (op1) && REG_P (op0))
: CONST_SCALAR_INT_P (op0))
tem = op0;
- do_compare_rtx_and_jump (res, tem, GEU, true, mode, NULL_RTX, NULL_RTX,
+ do_compare_rtx_and_jump (res, tem, GEU, true, mode, NULL_RTX, NULL,
done_label, PROB_VERY_LIKELY);
goto do_error_label;
}
@@ -698,26 +698,26 @@ expand_addsub_overflow (location_t loc, tree_code code, tree lhs,
tem = expand_binop (mode, ((pos_neg == 1) ^ (code == MINUS_EXPR))
? and_optab : ior_optab,
op0, res, NULL_RTX, false, OPTAB_LIB_WIDEN);
- do_compare_rtx_and_jump (tem, const0_rtx, GE, false, mode, NULL_RTX,
- NULL_RTX, done_label, PROB_VERY_LIKELY);
+ do_compare_rtx_and_jump (tem, const0_rtx, GE, false, mode, NULL,
+ NULL, done_label, PROB_VERY_LIKELY);
}
else
{
rtx_code_label *do_ior_label = gen_label_rtx ();
do_compare_rtx_and_jump (op1, const0_rtx,
code == MINUS_EXPR ? GE : LT, false, mode,
- NULL_RTX, NULL_RTX, do_ior_label,
+ NULL_RTX, NULL, do_ior_label,
PROB_EVEN);
tem = expand_binop (mode, and_optab, op0, res, NULL_RTX, false,
OPTAB_LIB_WIDEN);
do_compare_rtx_and_jump (tem, const0_rtx, GE, false, mode, NULL_RTX,
- NULL_RTX, done_label, PROB_VERY_LIKELY);
+ NULL, done_label, PROB_VERY_LIKELY);
emit_jump (do_error);
emit_label (do_ior_label);
tem = expand_binop (mode, ior_optab, op0, res, NULL_RTX, false,
OPTAB_LIB_WIDEN);
do_compare_rtx_and_jump (tem, const0_rtx, GE, false, mode, NULL_RTX,
- NULL_RTX, done_label, PROB_VERY_LIKELY);
+ NULL, done_label, PROB_VERY_LIKELY);
}
goto do_error_label;
}
@@ -730,14 +730,14 @@ expand_addsub_overflow (location_t loc, tree_code code, tree lhs,
res = expand_binop (mode, sub_optab, op0, op1, NULL_RTX, false,
OPTAB_LIB_WIDEN);
rtx_code_label *op0_geu_op1 = gen_label_rtx ();
- do_compare_rtx_and_jump (op0, op1, GEU, true, mode, NULL_RTX, NULL_RTX,
+ do_compare_rtx_and_jump (op0, op1, GEU, true, mode, NULL_RTX, NULL,
op0_geu_op1, PROB_EVEN);
do_compare_rtx_and_jump (res, const0_rtx, LT, false, mode, NULL_RTX,
- NULL_RTX, done_label, PROB_VERY_LIKELY);
+ NULL, done_label, PROB_VERY_LIKELY);
emit_jump (do_error);
emit_label (op0_geu_op1);
do_compare_rtx_and_jump (res, const0_rtx, GE, false, mode, NULL_RTX,
- NULL_RTX, done_label, PROB_VERY_LIKELY);
+ NULL, done_label, PROB_VERY_LIKELY);
goto do_error_label;
}
@@ -816,12 +816,12 @@ expand_addsub_overflow (location_t loc, tree_code code, tree lhs,
/* If the op1 is negative, we have to use a different check. */
if (pos_neg == 3)
do_compare_rtx_and_jump (op1, const0_rtx, LT, false, mode, NULL_RTX,
- NULL_RTX, sub_check, PROB_EVEN);
+ NULL, sub_check, PROB_EVEN);
/* Compare the result of the operation with one of the operands. */
if (pos_neg & 1)
do_compare_rtx_and_jump (res, op0, code == PLUS_EXPR ? GE : LE,
- false, mode, NULL_RTX, NULL_RTX, done_label,
+ false, mode, NULL_RTX, NULL, done_label,
PROB_VERY_LIKELY);
/* If we get here, we have to print the error. */
@@ -835,7 +835,7 @@ expand_addsub_overflow (location_t loc, tree_code code, tree lhs,
/* We have k = a + b for b < 0 here. k <= a must hold. */
if (pos_neg & 2)
do_compare_rtx_and_jump (res, op0, code == PLUS_EXPR ? LE : GE,
- false, mode, NULL_RTX, NULL_RTX, done_label,
+ false, mode, NULL_RTX, NULL, done_label,
PROB_VERY_LIKELY);
}
@@ -931,7 +931,7 @@ expand_neg_overflow (location_t loc, tree lhs, tree arg1, bool is_ubsan)
/* Compare the operand with the most negative value. */
rtx minv = expand_normal (TYPE_MIN_VALUE (TREE_TYPE (arg1)));
- do_compare_rtx_and_jump (op1, minv, NE, true, mode, NULL_RTX, NULL_RTX,
+ do_compare_rtx_and_jump (op1, minv, NE, true, mode, NULL_RTX, NULL,
done_label, PROB_VERY_LIKELY);
}
@@ -1068,15 +1068,15 @@ expand_mul_overflow (location_t loc, tree lhs, tree arg0, tree arg1,
ops.location = loc;
res = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);
do_compare_rtx_and_jump (op1, const0_rtx, EQ, true, mode, NULL_RTX,
- NULL_RTX, done_label, PROB_VERY_LIKELY);
+ NULL, done_label, PROB_VERY_LIKELY);
goto do_error_label;
case 3:
rtx_code_label *do_main_label;
do_main_label = gen_label_rtx ();
do_compare_rtx_and_jump (op0, const0_rtx, GE, false, mode, NULL_RTX,
- NULL_RTX, do_main_label, PROB_VERY_LIKELY);
+ NULL, do_main_label, PROB_VERY_LIKELY);
do_compare_rtx_and_jump (op1, const0_rtx, EQ, true, mode, NULL_RTX,
- NULL_RTX, do_main_label, PROB_VERY_LIKELY);
+ NULL, do_main_label, PROB_VERY_LIKELY);
write_complex_part (target, const1_rtx, true);
emit_label (do_main_label);
goto do_main;
@@ -1113,15 +1113,15 @@ expand_mul_overflow (location_t loc, tree lhs, tree arg0, tree arg1,
ops.location = loc;
res = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);
do_compare_rtx_and_jump (op0, const0_rtx, EQ, true, mode, NULL_RTX,
- NULL_RTX, done_label, PROB_VERY_LIKELY);
+ NULL, done_label, PROB_VERY_LIKELY);
do_compare_rtx_and_jump (op0, constm1_rtx, NE, true, mode, NULL_RTX,
- NULL_RTX, do_error, PROB_VERY_UNLIKELY);
+ NULL, do_error, PROB_VERY_UNLIKELY);
int prec;
prec = GET_MODE_PRECISION (mode);
rtx sgn;
sgn = immed_wide_int_const (wi::min_value (prec, SIGNED), mode);
do_compare_rtx_and_jump (op1, sgn, EQ, true, mode, NULL_RTX,
- NULL_RTX, done_label, PROB_VERY_LIKELY);
+ NULL, done_label, PROB_VERY_LIKELY);
goto do_error_label;
case 3:
/* Rest of handling of this case after res is computed. */
@@ -1167,7 +1167,7 @@ expand_mul_overflow (location_t loc, tree lhs, tree arg0, tree arg1,
tem = expand_binop (mode, and_optab, op0, op1, NULL_RTX, false,
OPTAB_LIB_WIDEN);
do_compare_rtx_and_jump (tem, const0_rtx, EQ, true, mode,
- NULL_RTX, NULL_RTX, done_label,
+ NULL_RTX, NULL, done_label,
PROB_VERY_LIKELY);
goto do_error_label;
}
@@ -1185,8 +1185,7 @@ expand_mul_overflow (location_t loc, tree lhs, tree arg0, tree arg1,
tem = expand_binop (mode, and_optab, op0, op1, NULL_RTX, false,
OPTAB_LIB_WIDEN);
do_compare_rtx_and_jump (tem, const0_rtx, GE, false, mode, NULL_RTX,
- NULL_RTX, after_negate_label,
- PROB_VERY_LIKELY);
+ NULL, after_negate_label, PROB_VERY_LIKELY);
/* Both arguments negative here, negate them and continue with
normal unsigned overflow checking multiplication. */
emit_move_insn (op0, expand_unop (mode, neg_optab, op0,
@@ -1202,13 +1201,13 @@ expand_mul_overflow (location_t loc, tree lhs, tree arg0, tree arg1,
tem2 = expand_binop (mode, xor_optab, op0, op1, NULL_RTX, false,
OPTAB_LIB_WIDEN);
do_compare_rtx_and_jump (tem2, const0_rtx, GE, false, mode, NULL_RTX,
- NULL_RTX, do_main_label, PROB_VERY_LIKELY);
+ NULL, do_main_label, PROB_VERY_LIKELY);
/* One argument is negative here, the other positive. This
overflows always, unless one of the arguments is 0. But
if e.g. s2 is 0, (U) s1 * 0 doesn't overflow, whatever s1
is, thus we can keep do_main code oring in overflow as is. */
do_compare_rtx_and_jump (tem, const0_rtx, EQ, true, mode, NULL_RTX,
- NULL_RTX, do_main_label, PROB_VERY_LIKELY);
+ NULL, do_main_label, PROB_VERY_LIKELY);
write_complex_part (target, const1_rtx, true);
emit_label (do_main_label);
goto do_main;
@@ -1274,7 +1273,7 @@ expand_mul_overflow (location_t loc, tree lhs, tree arg0, tree arg1,
/* For the unsigned multiplication, there was overflow if
HIPART is non-zero. */
do_compare_rtx_and_jump (hipart, const0_rtx, EQ, true, mode,
- NULL_RTX, NULL_RTX, done_label,
+ NULL_RTX, NULL, done_label,
PROB_VERY_LIKELY);
else
{
@@ -1284,7 +1283,7 @@ expand_mul_overflow (location_t loc, tree lhs, tree arg0, tree arg1,
the high half. There was overflow if
HIPART is different from RES < 0 ? -1 : 0. */
do_compare_rtx_and_jump (signbit, hipart, EQ, true, mode,
- NULL_RTX, NULL_RTX, done_label,
+ NULL_RTX, NULL, done_label,
PROB_VERY_LIKELY);
}
}
@@ -1377,12 +1376,12 @@ expand_mul_overflow (location_t loc, tree lhs, tree arg0, tree arg1,
if (!op0_small_p)
do_compare_rtx_and_jump (signbit0, hipart0, NE, true, hmode,
- NULL_RTX, NULL_RTX, large_op0,
+ NULL_RTX, NULL, large_op0,
PROB_UNLIKELY);
if (!op1_small_p)
do_compare_rtx_and_jump (signbit1, hipart1, NE, true, hmode,
- NULL_RTX, NULL_RTX, small_op0_large_op1,
+ NULL_RTX, NULL, small_op0_large_op1,
PROB_UNLIKELY);
/* If both op0 and op1 are sign (!uns) or zero (uns) extended from
@@ -1428,7 +1427,7 @@ expand_mul_overflow (location_t loc, tree lhs, tree arg0, tree arg1,
if (!op1_small_p)
do_compare_rtx_and_jump (signbit1, hipart1, NE, true, hmode,
- NULL_RTX, NULL_RTX, both_ops_large,
+ NULL_RTX, NULL, both_ops_large,
PROB_UNLIKELY);
/* If op1 is sign (!uns) or zero (uns) extended from hmode to mode,
@@ -1465,7 +1464,7 @@ expand_mul_overflow (location_t loc, tree lhs, tree arg0, tree arg1,
emit_jump (after_hipart_neg);
else if (larger_sign != -1)
do_compare_rtx_and_jump (hipart, const0_rtx, GE, false, hmode,
- NULL_RTX, NULL_RTX, after_hipart_neg,
+ NULL_RTX, NULL, after_hipart_neg,
PROB_EVEN);
tem = convert_modes (mode, hmode, lopart, 1);
@@ -1481,7 +1480,7 @@ expand_mul_overflow (location_t loc, tree lhs, tree arg0, tree arg1,
emit_jump (after_lopart_neg);
else if (smaller_sign != -1)
do_compare_rtx_and_jump (lopart, const0_rtx, GE, false, hmode,
- NULL_RTX, NULL_RTX, after_lopart_neg,
+ NULL_RTX, NULL, after_lopart_neg,
PROB_EVEN);
tem = expand_simple_binop (mode, MINUS, loxhi, larger, NULL_RTX,
@@ -1510,7 +1509,7 @@ expand_mul_overflow (location_t loc, tree lhs, tree arg0, tree arg1,
hprec - 1, NULL_RTX, 0);
do_compare_rtx_and_jump (signbitloxhi, hipartloxhi, NE, true, hmode,
- NULL_RTX, NULL_RTX, do_overflow,
+ NULL_RTX, NULL, do_overflow,
PROB_VERY_UNLIKELY);
/* res = (loxhi << (bitsize / 2)) | (hmode) lo0xlo1; */
@@ -1546,7 +1545,7 @@ expand_mul_overflow (location_t loc, tree lhs, tree arg0, tree arg1,
tem = expand_simple_binop (hmode, PLUS, hipart0, const1_rtx,
NULL_RTX, 1, OPTAB_DIRECT);
do_compare_rtx_and_jump (tem, const1_rtx, GTU, true, hmode,
- NULL_RTX, NULL_RTX, do_error,
+ NULL_RTX, NULL, do_error,
PROB_VERY_UNLIKELY);
}
@@ -1555,7 +1554,7 @@ expand_mul_overflow (location_t loc, tree lhs, tree arg0, tree arg1,
tem = expand_simple_binop (hmode, PLUS, hipart1, const1_rtx,
NULL_RTX, 1, OPTAB_DIRECT);
do_compare_rtx_and_jump (tem, const1_rtx, GTU, true, hmode,
- NULL_RTX, NULL_RTX, do_error,
+ NULL_RTX, NULL, do_error,
PROB_VERY_UNLIKELY);
}
@@ -1566,18 +1565,18 @@ expand_mul_overflow (location_t loc, tree lhs, tree arg0, tree arg1,
emit_jump (hipart_different);
else if (op0_sign == 1 || op1_sign == 1)
do_compare_rtx_and_jump (hipart0, hipart1, NE, true, hmode,
- NULL_RTX, NULL_RTX, hipart_different,
+ NULL_RTX, NULL, hipart_different,
PROB_EVEN);
do_compare_rtx_and_jump (res, const0_rtx, LT, false, mode,
- NULL_RTX, NULL_RTX, do_error,
+ NULL_RTX, NULL, do_error,
PROB_VERY_UNLIKELY);
emit_jump (done_label);
emit_label (hipart_different);
do_compare_rtx_and_jump (res, const0_rtx, GE, false, mode,
- NULL_RTX, NULL_RTX, do_error,
+ NULL_RTX, NULL, do_error,
PROB_VERY_UNLIKELY);
emit_jump (done_label);
}
@@ -1623,7 +1622,7 @@ expand_mul_overflow (location_t loc, tree lhs, tree arg0, tree arg1,
{
rtx_code_label *all_done_label = gen_label_rtx ();
do_compare_rtx_and_jump (res, const0_rtx, GE, false, mode, NULL_RTX,
- NULL_RTX, all_done_label, PROB_VERY_LIKELY);
+ NULL, all_done_label, PROB_VERY_LIKELY);
write_complex_part (target, const1_rtx, true);
emit_label (all_done_label);
}
@@ -1634,13 +1633,13 @@ expand_mul_overflow (location_t loc, tree lhs, tree arg0, tree arg1,
rtx_code_label *all_done_label = gen_label_rtx ();
rtx_code_label *set_noovf = gen_label_rtx ();
do_compare_rtx_and_jump (op1, const0_rtx, GE, false, mode, NULL_RTX,
- NULL_RTX, all_done_label, PROB_VERY_LIKELY);
+ NULL, all_done_label, PROB_VERY_LIKELY);
write_complex_part (target, const1_rtx, true);
do_compare_rtx_and_jump (op0, const0_rtx, EQ, true, mode, NULL_RTX,
- NULL_RTX, set_noovf, PROB_VERY_LIKELY);
+ NULL, set_noovf, PROB_VERY_LIKELY);
do_compare_rtx_and_jump (op0, constm1_rtx, NE, true, mode, NULL_RTX,
- NULL_RTX, all_done_label, PROB_VERY_UNLIKELY);
- do_compare_rtx_and_jump (op1, res, NE, true, mode, NULL_RTX, NULL_RTX,
+ NULL, all_done_label, PROB_VERY_UNLIKELY);
+ do_compare_rtx_and_jump (op1, res, NE, true, mode, NULL_RTX, NULL,
all_done_label, PROB_VERY_UNLIKELY);
emit_label (set_noovf);
write_complex_part (target, const0_rtx, true);
@@ -4991,7 +4991,7 @@ split_live_ranges_for_shrink_wrap (void)
if (newreg)
{
- rtx new_move = gen_move_insn (newreg, dest);
+ rtx_insn *new_move = gen_move_insn (newreg, dest);
emit_insn_after (new_move, bb_note (call_dom));
if (dump_file)
{
@@ -1580,9 +1580,9 @@ redirect_jump_1 (rtx jump, rtx nlabel)
(this can only occur when trying to produce return insns). */
int
-redirect_jump (rtx jump, rtx nlabel, int delete_unused)
+redirect_jump (rtx_jump_insn *jump, rtx nlabel, int delete_unused)
{
- rtx olabel = JUMP_LABEL (jump);
+ rtx olabel = jump->jump_label ();
if (!nlabel)
{
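
Note that redirect_jump reads the old target through jump->jump_label () and keeps nlabel as plain rtx, while other sites use the typed jump_target (). That split looks deliberate given the convert_jumps_to_returns hunk above, which redirects jumps to ret_rtx or simple_return_rtx: the JUMP_LABEL slot is not always a code label. A hedged sketch refining the earlier accessor sketch (stand-in types):

    struct rtx_def { int code; };
    typedef rtx_def *rtx;
    struct rtx_insn : rtx_def {};
    struct rtx_code_label : rtx_insn {};
    const int CODE_LABEL = 2;

    struct rtx_jump_insn : rtx_insn
    {
      rtx label_slot;          /* a code label, or RETURN/SIMPLE_RETURN */

      /* Untyped read: always safe, whatever the slot holds.  */
      rtx jump_label () const { return label_slot; }

      /* Typed read: only meaningful when the slot really is a label.  */
      rtx_code_label *jump_target () const
      {
        return label_slot && label_slot->code == CODE_LABEL
               ? static_cast<rtx_code_label *> (label_slot) : nullptr;
      }
    };

    int main ()
    {
      rtx_code_label lab; lab.code = CODE_LABEL;
      rtx_jump_insn jump; jump.label_slot = &lab;
      return (jump.jump_label () && jump.jump_target ()) ? 0 : 1;
    }
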
@@ -1612,7 +1612,7 @@ redirect_jump (rtx jump, rtx nlabel, int delete_unused)
If DELETE_UNUSED is positive, delete related insn to OLABEL if its ref
count has dropped to zero. */
void
-redirect_jump_2 (rtx jump, rtx olabel, rtx nlabel, int delete_unused,
+redirect_jump_2 (rtx_jump_insn *jump, rtx olabel, rtx nlabel, int delete_unused,
int invert)
{
rtx note;
@@ -1700,7 +1700,7 @@ invert_exp_1 (rtx x, rtx insn)
inversion and redirection. */
int
-invert_jump_1 (rtx_insn *jump, rtx nlabel)
+invert_jump_1 (rtx_jump_insn *jump, rtx nlabel)
{
rtx x = pc_set (jump);
int ochanges;
@@ -1724,7 +1724,7 @@ invert_jump_1 (rtx_insn *jump, rtx nlabel)
NLABEL instead of where it jumps now. Return true if successful. */
int
-invert_jump (rtx_insn *jump, rtx nlabel, int delete_unused)
+invert_jump (rtx_jump_insn *jump, rtx nlabel, int delete_unused)
{
rtx olabel = JUMP_LABEL (jump);
@@ -794,10 +794,11 @@ split_edge_and_insert (edge e, rtx_insn *insns)
in order to create a jump. */
static rtx_insn *
-compare_and_jump_seq (rtx op0, rtx op1, enum rtx_code comp, rtx label, int prob,
- rtx_insn *cinsn)
+compare_and_jump_seq (rtx op0, rtx op1, enum rtx_code comp,
+ rtx_code_label *label, int prob, rtx_insn *cinsn)
{
- rtx_insn *seq, *jump;
+ rtx_insn *seq;
+ rtx_jump_insn *jump;
rtx cond;
machine_mode mode;
@@ -816,8 +817,7 @@ compare_and_jump_seq (rtx op0, rtx op1, enum rtx_code comp, rtx label, int prob,
gcc_assert (rtx_equal_p (op0, XEXP (cond, 0)));
gcc_assert (rtx_equal_p (op1, XEXP (cond, 1)));
emit_jump_insn (copy_insn (PATTERN (cinsn)));
- jump = get_last_insn ();
- gcc_assert (JUMP_P (jump));
+ jump = as_a <rtx_jump_insn *> (get_last_insn ());
JUMP_LABEL (jump) = JUMP_LABEL (cinsn);
LABEL_NUSES (JUMP_LABEL (jump))++;
redirect_jump (jump, label, 0);
@@ -829,10 +829,9 @@ compare_and_jump_seq (rtx op0, rtx op1, enum rtx_code comp, rtx label, int prob,
op0 = force_operand (op0, NULL_RTX);
op1 = force_operand (op1, NULL_RTX);
do_compare_rtx_and_jump (op0, op1, comp, 0,
- mode, NULL_RTX, NULL_RTX, label, -1);
- jump = get_last_insn ();
- gcc_assert (JUMP_P (jump));
- JUMP_LABEL (jump) = label;
+ mode, NULL_RTX, NULL, label, -1);
+ jump = as_a <rtx_jump_insn *> (get_last_insn ());
+ jump->set_jump_target (label);
LABEL_NUSES (label)++;
}
add_int_reg_note (jump, REG_BR_PROB, prob);
@@ -1060,9 +1060,8 @@ emit_spill_move (bool to_p, rtx mem_pseudo, rtx val)
LRA_SUBREG_P (mem_pseudo) = 1;
}
}
- return as_a <rtx_insn *> (to_p
- ? gen_move_insn (mem_pseudo, val)
- : gen_move_insn (val, mem_pseudo));
+ return to_p ? gen_move_insn (mem_pseudo, val)
+ : gen_move_insn (val, mem_pseudo);
}
/* Process a special case insn (register move), return true if we
@@ -4766,7 +4765,7 @@ inherit_reload_reg (bool def_p, int original_regno,
" Inheritance reuse change %d->%d (bb%d):\n",
original_regno, REGNO (new_reg),
BLOCK_FOR_INSN (usage_insn)->index);
- dump_insn_slim (lra_dump_file, usage_insn);
+ dump_insn_slim (lra_dump_file, as_a <rtx_insn *> (usage_insn));
}
}
if (lra_dump_file != NULL)
@@ -5026,7 +5025,7 @@ split_reg (bool before_p, int original_regno, rtx_insn *insn,
{
fprintf (lra_dump_file, " Split reuse change %d->%d:\n",
original_regno, REGNO (new_reg));
- dump_insn_slim (lra_dump_file, usage_insn);
+ dump_insn_slim (lra_dump_file, as_a <rtx_insn *> (usage_insn));
}
}
lra_assert (NOTE_P (usage_insn) || NONDEBUG_INSN_P (usage_insn));
@@ -790,8 +790,7 @@ schedule_reg_moves (partial_schedule_ptr ps)
move->old_reg = old_reg;
move->new_reg = gen_reg_rtx (GET_MODE (prev_reg));
move->num_consecutive_stages = distances[0] && distances[1] ? 2 : 1;
- move->insn = as_a <rtx_insn *> (gen_move_insn (move->new_reg,
- copy_rtx (prev_reg)));
+ move->insn = gen_move_insn (move->new_reg, copy_rtx (prev_reg));
bitmap_clear (move->uses);
prev_reg = move->new_reg;
@@ -1416,7 +1416,7 @@ expand_binop_directly (machine_mode mode, optab binoptab,
machine_mode mode0, mode1, tmp_mode;
struct expand_operand ops[3];
bool commutative_p;
- rtx pat;
+ rtx_insn *pat;
rtx xop0 = op0, xop1 = op1;
rtx swap;
@@ -1499,8 +1499,8 @@ expand_binop_directly (machine_mode mode, optab binoptab,
/* If PAT is composed of more than one insn, try to add an appropriate
REG_EQUAL note to it. If we can't because TEMP conflicts with an
operand, call expand_binop again, this time without a target. */
- if (INSN_P (pat) && NEXT_INSN (as_a <rtx_insn *> (pat)) != NULL_RTX
- && ! add_equal_note (as_a <rtx_insn *> (pat), ops[0].value,
+ if (INSN_P (pat) && NEXT_INSN (pat) != NULL_RTX
+ && ! add_equal_note (pat, ops[0].value,
optab_to_code (binoptab),
ops[1].value, ops[2].value))
{
@@ -3016,15 +3016,15 @@ expand_unop_direct (machine_mode mode, optab unoptab, rtx op0, rtx target,
struct expand_operand ops[2];
enum insn_code icode = optab_handler (unoptab, mode);
rtx_insn *last = get_last_insn ();
- rtx pat;
+ rtx_insn *pat;
create_output_operand (&ops[0], target, mode);
create_convert_operand_from (&ops[1], op0, mode, unsignedp);
pat = maybe_gen_insn (icode, 2, ops);
if (pat)
{
- if (INSN_P (pat) && NEXT_INSN (as_a <rtx_insn *> (pat)) != NULL_RTX
- && ! add_equal_note (as_a <rtx_insn *> (pat), ops[0].value,
+ if (INSN_P (pat) && NEXT_INSN (pat) != NULL_RTX
+ && ! add_equal_note (pat, ops[0].value,
optab_to_code (unoptab),
ops[1].value, NULL_RTX))
{
@@ -3508,7 +3508,7 @@ expand_abs (machine_mode mode, rtx op0, rtx target,
NO_DEFER_POP;
do_compare_rtx_and_jump (target, CONST0_RTX (mode), GE, 0, mode,
- NULL_RTX, NULL_RTX, op1, -1);
+ NULL_RTX, NULL, op1, -1);
op0 = expand_unop (mode, result_unsignedp ? neg_optab : negv_optab,
target, target, 0);
@@ -3817,7 +3817,7 @@ maybe_emit_unop_insn (enum insn_code icode, rtx target, rtx op0,
enum rtx_code code)
{
struct expand_operand ops[2];
- rtx pat;
+ rtx_insn *pat;
create_output_operand (&ops[0], target, GET_MODE (target));
create_input_operand (&ops[1], op0, GET_MODE (op0));
@@ -3825,10 +3825,9 @@ maybe_emit_unop_insn (enum insn_code icode, rtx target, rtx op0,
if (!pat)
return false;
- if (INSN_P (pat) && NEXT_INSN (as_a <rtx_insn *> (pat)) != NULL_RTX
+ if (INSN_P (pat) && NEXT_INSN (pat) != NULL_RTX
&& code != UNKNOWN)
- add_equal_note (as_a <rtx_insn *> (pat), ops[0].value, code, ops[1].value,
- NULL_RTX);
+ add_equal_note (pat, ops[0].value, code, ops[1].value, NULL_RTX);
emit_insn (pat);
@@ -8370,13 +8369,13 @@ maybe_legitimize_operands (enum insn_code icode, unsigned int opno,
and emit any necessary set-up code. Return null and emit no
code on failure. */
-rtx
+rtx_insn *
maybe_gen_insn (enum insn_code icode, unsigned int nops,
struct expand_operand *ops)
{
gcc_assert (nops == (unsigned int) insn_data[(int) icode].n_generator_args);
if (!maybe_legitimize_operands (icode, 0, nops, ops))
- return NULL_RTX;
+ return NULL;
switch (nops)
{
@@ -541,8 +541,8 @@ extern void create_convert_operand_from_type (struct expand_operand *op,
extern bool maybe_legitimize_operands (enum insn_code icode,
unsigned int opno, unsigned int nops,
struct expand_operand *ops);
-extern rtx maybe_gen_insn (enum insn_code icode, unsigned int nops,
- struct expand_operand *ops);
+extern rtx_insn *maybe_gen_insn (enum insn_code icode, unsigned int nops,
+ struct expand_operand *ops);
extern bool maybe_expand_insn (enum insn_code icode, unsigned int nops,
struct expand_operand *ops);
extern bool maybe_expand_jump_insn (enum insn_code icode, unsigned int nops,
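
Since maybe_gen_insn now returns rtx_insn *, callers can walk the
generated sequence directly.  A hedged sketch of the resulting call-site
shape (icode, ops and code are assumed to come from the surrounding
expander, as in the optabs hunks above):

  rtx_insn *pat = maybe_gen_insn (icode, 2, ops);
  if (pat && INSN_P (pat) && NEXT_INSN (pat) != NULL_RTX)
    add_equal_note (pat, ops[0].value, code, ops[1].value, NULL_RTX);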
@@ -1115,8 +1115,8 @@ eliminate_partially_redundant_load (basic_block bb, rtx_insn *insn,
/* Make sure we can generate a move from register avail_reg to
dest. */
- rtx_insn *move = as_a <rtx_insn *>
- (gen_move_insn (copy_rtx (dest), copy_rtx (avail_reg)));
+ rtx_insn *move = gen_move_insn (copy_rtx (dest),
+ copy_rtx (avail_reg));
extract_insn (move);
if (! constrain_operands (1, get_preferred_alternatives (insn,
pred_bb))
@@ -3066,7 +3066,7 @@ split_all_insns_noflow (void)
#ifdef HAVE_peephole2
struct peep2_insn_data
{
- rtx insn;
+ rtx_insn *insn;
regset live_before;
};
@@ -3082,7 +3082,7 @@ int peep2_current_count;
/* A non-insn marker indicating the last insn of the block.
The live_before regset for this element is correct, indicating
DF_LIVE_OUT for the block. */
-#define PEEP2_EOB pc_rtx
+#define PEEP2_EOB (static_cast<rtx_insn *> (pc_rtx))
/* Wrap N to fit into the peep2_insn_data buffer. */
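
PEEP2_EOB remains only a sentinel: the static_cast keeps the marker
assignable to the now strongly typed field, but it must never be
dereferenced as an insn, only compared against.  Sketch of the one
legitimate use (pos is a hypothetical buffer index):

  if (peep2_insn_data[pos].insn == PEEP2_EOB)
    return false;   /* reached the end of the basic block */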
@@ -3285,7 +3285,7 @@ peep2_reinit_state (regset live)
/* Indicate that all slots except the last holds invalid data. */
for (i = 0; i < MAX_INSNS_PER_PEEP2; ++i)
- peep2_insn_data[i].insn = NULL_RTX;
+ peep2_insn_data[i].insn = NULL;
peep2_current_count = 0;
/* Indicate that the last slot contains live_after data. */
@@ -3313,7 +3313,7 @@ peep2_attempt (basic_block bb, rtx uncast_insn, int match_len, rtx_insn *attempt
/* If we are splitting an RTX_FRAME_RELATED_P insn, do not allow it to
match more than one insn, or to be split into more than one insn. */
- old_insn = as_a <rtx_insn *> (peep2_insn_data[peep2_current].insn);
+ old_insn = peep2_insn_data[peep2_current].insn;
if (RTX_FRAME_RELATED_P (old_insn))
{
bool any_note = false;
@@ -3401,7 +3401,7 @@ peep2_attempt (basic_block bb, rtx uncast_insn, int match_len, rtx_insn *attempt
rtx note;
j = peep2_buf_position (peep2_current + i);
- old_insn = as_a <rtx_insn *> (peep2_insn_data[j].insn);
+ old_insn = peep2_insn_data[j].insn;
if (!CALL_P (old_insn))
continue;
was_call = true;
@@ -3440,7 +3440,7 @@ peep2_attempt (basic_block bb, rtx uncast_insn, int match_len, rtx_insn *attempt
while (++i <= match_len)
{
j = peep2_buf_position (peep2_current + i);
- old_insn = as_a <rtx_insn *> (peep2_insn_data[j].insn);
+ old_insn = peep2_insn_data[j].insn;
gcc_assert (!CALL_P (old_insn));
}
break;
@@ -3452,7 +3452,7 @@ peep2_attempt (basic_block bb, rtx uncast_insn, int match_len, rtx_insn *attempt
for (i = match_len; i >= 0; --i)
{
int j = peep2_buf_position (peep2_current + i);
- old_insn = as_a <rtx_insn *> (peep2_insn_data[j].insn);
+ old_insn = peep2_insn_data[j].insn;
as_note = find_reg_note (old_insn, REG_ARGS_SIZE, NULL);
if (as_note)
@@ -3463,7 +3463,7 @@ peep2_attempt (basic_block bb, rtx uncast_insn, int match_len, rtx_insn *attempt
eh_note = find_reg_note (peep2_insn_data[i].insn, REG_EH_REGION, NULL_RTX);
/* Replace the old sequence with the new. */
- rtx_insn *peepinsn = as_a <rtx_insn *> (peep2_insn_data[i].insn);
+ rtx_insn *peepinsn = peep2_insn_data[i].insn;
last = emit_insn_after_setloc (attempt,
peep2_insn_data[i].insn,
INSN_LOCATION (peepinsn));
@@ -3580,7 +3580,7 @@ peep2_update_life (basic_block bb, int match_len, rtx_insn *last,
add more instructions to the buffer. */
static bool
-peep2_fill_buffer (basic_block bb, rtx insn, regset live)
+peep2_fill_buffer (basic_block bb, rtx_insn *insn, regset live)
{
int pos;
@@ -3606,7 +3606,7 @@ peep2_fill_buffer (basic_block bb, rtx insn, regset live)
COPY_REG_SET (peep2_insn_data[pos].live_before, live);
peep2_current_count++;
- df_simulate_one_insn_forwards (bb, as_a <rtx_insn *> (insn), live);
+ df_simulate_one_insn_forwards (bb, insn, live);
return true;
}
@@ -276,43 +276,43 @@ typedef const char * (*insn_output_fn) (rtx *, rtx_insn *);
struct insn_gen_fn
{
- typedef rtx (*f0) (void);
- typedef rtx (*f1) (rtx);
- typedef rtx (*f2) (rtx, rtx);
- typedef rtx (*f3) (rtx, rtx, rtx);
- typedef rtx (*f4) (rtx, rtx, rtx, rtx);
- typedef rtx (*f5) (rtx, rtx, rtx, rtx, rtx);
- typedef rtx (*f6) (rtx, rtx, rtx, rtx, rtx, rtx);
- typedef rtx (*f7) (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
- typedef rtx (*f8) (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx);
- typedef rtx (*f9) (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx);
- typedef rtx (*f10) (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx);
- typedef rtx (*f11) (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx);
- typedef rtx (*f12) (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx);
- typedef rtx (*f13) (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx);
- typedef rtx (*f14) (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx);
- typedef rtx (*f15) (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx);
- typedef rtx (*f16) (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx);
+ typedef rtx_insn * (*f0) (void);
+ typedef rtx_insn * (*f1) (rtx);
+ typedef rtx_insn * (*f2) (rtx, rtx);
+ typedef rtx_insn * (*f3) (rtx, rtx, rtx);
+ typedef rtx_insn * (*f4) (rtx, rtx, rtx, rtx);
+ typedef rtx_insn * (*f5) (rtx, rtx, rtx, rtx, rtx);
+ typedef rtx_insn * (*f6) (rtx, rtx, rtx, rtx, rtx, rtx);
+ typedef rtx_insn * (*f7) (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
+ typedef rtx_insn * (*f8) (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx);
+ typedef rtx_insn * (*f9) (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx);
+ typedef rtx_insn * (*f10) (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx);
+ typedef rtx_insn * (*f11) (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx);
+ typedef rtx_insn * (*f12) (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx);
+ typedef rtx_insn * (*f13) (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx);
+ typedef rtx_insn * (*f14) (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx);
+ typedef rtx_insn * (*f15) (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx);
+ typedef rtx_insn * (*f16) (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx);
typedef f0 stored_funcptr;
- rtx operator () (void) const { return ((f0)func) (); }
- rtx operator () (rtx a0) const { return ((f1)func) (a0); }
- rtx operator () (rtx a0, rtx a1) const { return ((f2)func) (a0, a1); }
- rtx operator () (rtx a0, rtx a1, rtx a2) const { return ((f3)func) (a0, a1, a2); }
- rtx operator () (rtx a0, rtx a1, rtx a2, rtx a3) const { return ((f4)func) (a0, a1, a2, a3); }
- rtx operator () (rtx a0, rtx a1, rtx a2, rtx a3, rtx a4) const { return ((f5)func) (a0, a1, a2, a3, a4); }
- rtx operator () (rtx a0, rtx a1, rtx a2, rtx a3, rtx a4, rtx a5) const { return ((f6)func) (a0, a1, a2, a3, a4, a5); }
- rtx operator () (rtx a0, rtx a1, rtx a2, rtx a3, rtx a4, rtx a5, rtx a6) const { return ((f7)func) (a0, a1, a2, a3, a4, a5, a6); }
- rtx operator () (rtx a0, rtx a1, rtx a2, rtx a3, rtx a4, rtx a5, rtx a6, rtx a7) const { return ((f8)func) (a0, a1, a2, a3, a4, a5, a6, a7); }
- rtx operator () (rtx a0, rtx a1, rtx a2, rtx a3, rtx a4, rtx a5, rtx a6, rtx a7, rtx a8) const { return ((f9)func) (a0, a1, a2, a3, a4, a5, a6, a7, a8); }
- rtx operator () (rtx a0, rtx a1, rtx a2, rtx a3, rtx a4, rtx a5, rtx a6, rtx a7, rtx a8, rtx a9) const { return ((f10)func) (a0, a1, a2, a3, a4, a5, a6, a7, a8, a9); }
- rtx operator () (rtx a0, rtx a1, rtx a2, rtx a3, rtx a4, rtx a5, rtx a6, rtx a7, rtx a8, rtx a9, rtx a10) const { return ((f11)func) (a0, a1, a2, a3, a4, a5, a6, a7, a8, a9, a10); }
- rtx operator () (rtx a0, rtx a1, rtx a2, rtx a3, rtx a4, rtx a5, rtx a6, rtx a7, rtx a8, rtx a9, rtx a10, rtx a11) const { return ((f12)func) (a0, a1, a2, a3, a4, a5, a6, a7, a8, a9, a10, a11); }
- rtx operator () (rtx a0, rtx a1, rtx a2, rtx a3, rtx a4, rtx a5, rtx a6, rtx a7, rtx a8, rtx a9, rtx a10, rtx a11, rtx a12) const { return ((f13)func) (a0, a1, a2, a3, a4, a5, a6, a7, a8, a9, a10, a11, a12); }
- rtx operator () (rtx a0, rtx a1, rtx a2, rtx a3, rtx a4, rtx a5, rtx a6, rtx a7, rtx a8, rtx a9, rtx a10, rtx a11, rtx a12, rtx a13) const { return ((f14)func) (a0, a1, a2, a3, a4, a5, a6, a7, a8, a9, a10, a11, a12, a13); }
- rtx operator () (rtx a0, rtx a1, rtx a2, rtx a3, rtx a4, rtx a5, rtx a6, rtx a7, rtx a8, rtx a9, rtx a10, rtx a11, rtx a12, rtx a13, rtx a14) const { return ((f15)func) (a0, a1, a2, a3, a4, a5, a6, a7, a8, a9, a10, a11, a12, a13, a14); }
- rtx operator () (rtx a0, rtx a1, rtx a2, rtx a3, rtx a4, rtx a5, rtx a6, rtx a7, rtx a8, rtx a9, rtx a10, rtx a11, rtx a12, rtx a13, rtx a14, rtx a15) const { return ((f16)func) (a0, a1, a2, a3, a4, a5, a6, a7, a8, a9, a10, a11, a12, a13, a14, a15); }
+ rtx_insn * operator () (void) const { return ((f0)func) (); }
+ rtx_insn * operator () (rtx a0) const { return ((f1)func) (a0); }
+ rtx_insn * operator () (rtx a0, rtx a1) const { return ((f2)func) (a0, a1); }
+ rtx_insn * operator () (rtx a0, rtx a1, rtx a2) const { return ((f3)func) (a0, a1, a2); }
+ rtx_insn * operator () (rtx a0, rtx a1, rtx a2, rtx a3) const { return ((f4)func) (a0, a1, a2, a3); }
+ rtx_insn * operator () (rtx a0, rtx a1, rtx a2, rtx a3, rtx a4) const { return ((f5)func) (a0, a1, a2, a3, a4); }
+ rtx_insn * operator () (rtx a0, rtx a1, rtx a2, rtx a3, rtx a4, rtx a5) const { return ((f6)func) (a0, a1, a2, a3, a4, a5); }
+ rtx_insn * operator () (rtx a0, rtx a1, rtx a2, rtx a3, rtx a4, rtx a5, rtx a6) const { return ((f7)func) (a0, a1, a2, a3, a4, a5, a6); }
+ rtx_insn * operator () (rtx a0, rtx a1, rtx a2, rtx a3, rtx a4, rtx a5, rtx a6, rtx a7) const { return ((f8)func) (a0, a1, a2, a3, a4, a5, a6, a7); }
+ rtx_insn * operator () (rtx a0, rtx a1, rtx a2, rtx a3, rtx a4, rtx a5, rtx a6, rtx a7, rtx a8) const { return ((f9)func) (a0, a1, a2, a3, a4, a5, a6, a7, a8); }
+ rtx_insn * operator () (rtx a0, rtx a1, rtx a2, rtx a3, rtx a4, rtx a5, rtx a6, rtx a7, rtx a8, rtx a9) const { return ((f10)func) (a0, a1, a2, a3, a4, a5, a6, a7, a8, a9); }
+ rtx_insn * operator () (rtx a0, rtx a1, rtx a2, rtx a3, rtx a4, rtx a5, rtx a6, rtx a7, rtx a8, rtx a9, rtx a10) const { return ((f11)func) (a0, a1, a2, a3, a4, a5, a6, a7, a8, a9, a10); }
+ rtx_insn * operator () (rtx a0, rtx a1, rtx a2, rtx a3, rtx a4, rtx a5, rtx a6, rtx a7, rtx a8, rtx a9, rtx a10, rtx a11) const { return ((f12)func) (a0, a1, a2, a3, a4, a5, a6, a7, a8, a9, a10, a11); }
+ rtx_insn * operator () (rtx a0, rtx a1, rtx a2, rtx a3, rtx a4, rtx a5, rtx a6, rtx a7, rtx a8, rtx a9, rtx a10, rtx a11, rtx a12) const { return ((f13)func) (a0, a1, a2, a3, a4, a5, a6, a7, a8, a9, a10, a11, a12); }
+ rtx_insn * operator () (rtx a0, rtx a1, rtx a2, rtx a3, rtx a4, rtx a5, rtx a6, rtx a7, rtx a8, rtx a9, rtx a10, rtx a11, rtx a12, rtx a13) const { return ((f14)func) (a0, a1, a2, a3, a4, a5, a6, a7, a8, a9, a10, a11, a12, a13); }
+ rtx_insn * operator () (rtx a0, rtx a1, rtx a2, rtx a3, rtx a4, rtx a5, rtx a6, rtx a7, rtx a8, rtx a9, rtx a10, rtx a11, rtx a12, rtx a13, rtx a14) const { return ((f15)func) (a0, a1, a2, a3, a4, a5, a6, a7, a8, a9, a10, a11, a12, a13, a14); }
+ rtx_insn * operator () (rtx a0, rtx a1, rtx a2, rtx a3, rtx a4, rtx a5, rtx a6, rtx a7, rtx a8, rtx a9, rtx a10, rtx a11, rtx a12, rtx a13, rtx a14, rtx a15) const { return ((f16)func) (a0, a1, a2, a3, a4, a5, a6, a7, a8, a9, a10, a11, a12, a13, a14, a15); }
// This is for compatibility of code that invokes functions like
// (*funcptr) (arg)
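
insn_gen_fn is the type behind GEN_FCN, so retyping these signatures
propagates rtx_insn * to every generated gen_* pattern at once.  An
illustrative sketch (icode, op0 and op1 are assumed to be a valid insn
code and operands in scope):

  rtx_insn *insn = GEN_FCN (icode) (op0, op1);   /* now typed, no cast */
  emit_insn (insn);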
@@ -439,7 +439,7 @@ find_dead_or_set_registers (rtx_insn *target, struct resources *res,
for (insn = target; insn; insn = next_insn)
{
- rtx_insn *this_jump_insn = insn;
+ rtx_insn *this_insn = insn;
next_insn = NEXT_INSN (insn);
@@ -487,8 +487,8 @@ find_dead_or_set_registers (rtx_insn *target, struct resources *res,
of a call, so search for a JUMP_INSN in any position. */
for (i = 0; i < seq->len (); i++)
{
- this_jump_insn = seq->insn (i);
- if (JUMP_P (this_jump_insn))
+ this_insn = seq->insn (i);
+ if (JUMP_P (this_insn))
break;
}
}
@@ -497,14 +497,14 @@ find_dead_or_set_registers (rtx_insn *target, struct resources *res,
break;
}
- if (JUMP_P (this_jump_insn))
+ if (rtx_jump_insn *this_jump_insn = dyn_cast <rtx_jump_insn *> (this_insn))
{
if (jump_count++ < 10)
{
if (any_uncondjump_p (this_jump_insn)
|| ANY_RETURN_P (PATTERN (this_jump_insn)))
{
- rtx lab_or_return = JUMP_LABEL (this_jump_insn);
+ rtx lab_or_return = this_jump_insn->jump_label ();
if (ANY_RETURN_P (lab_or_return))
next_insn = NULL;
else
@@ -577,10 +577,10 @@ find_dead_or_set_registers (rtx_insn *target, struct resources *res,
AND_COMPL_HARD_REG_SET (scratch, needed.regs);
AND_COMPL_HARD_REG_SET (fallthrough_res.regs, scratch);
- if (!ANY_RETURN_P (JUMP_LABEL (this_jump_insn)))
- find_dead_or_set_registers (JUMP_LABEL_AS_INSN (this_jump_insn),
- &target_res, 0, jump_count,
- target_set, needed);
+ if (!ANY_RETURN_P (this_jump_insn->jump_label ()))
+ find_dead_or_set_registers
+ (this_jump_insn->jump_target (),
+ &target_res, 0, jump_count, target_set, needed);
find_dead_or_set_registers (next_insn,
&fallthrough_res, 0, jump_count,
set, needed);
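
The rewrite above also shows the dyn_cast flavour of the idiom: unlike
as_a, dyn_cast yields NULL for a non-jump insn, so the type test and the
typed variable share one if-condition.  A sketch, with an illustrative
body only:

  if (rtx_jump_insn *jump = dyn_cast <rtx_jump_insn *> (this_insn))
    {
      /* jump_label () may be a RETURN rtx; jump_target () additionally
         asserts that the target, when present, is a real insn.  */
      if (!ANY_RETURN_P (jump->jump_label ()))
        next_insn = jump->jump_target ();
    }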
@@ -546,6 +546,7 @@ class GTY(()) rtx_nonjump_insn : public rtx_insn
class GTY(()) rtx_jump_insn : public rtx_insn
{
+public:
/* No extra fields, but adds the invariant:
JUMP_P (X) aka (GET_CODE (X) == JUMP_INSN)
i.e. an instruction that can possibly jump.
@@ -553,6 +554,18 @@ class GTY(()) rtx_jump_insn : public rtx_insn
This is an instance of:
DEF_RTL_EXPR(JUMP_INSN, "jump_insn", "uuBeiie0", RTX_INSN)
from rtl.def. */
+
+  /* Returns the jump target of this instruction.  */
+
+  inline rtx jump_label () const;
+
+  /* Returns the jump target cast to rtx_insn *.  */
+
+  inline rtx_insn *jump_target () const;
+
+  /* Sets the jump target.  */
+
+  inline void set_jump_target (rtx_insn *);
};
class GTY(()) rtx_call_insn : public rtx_insn
@@ -827,6 +840,14 @@ is_a_helper <rtx_debug_insn *>::test (rtx rt)
template <>
template <>
inline bool
+is_a_helper <rtx_debug_insn *>::test (rtx_insn *insn)
+{
+ return DEBUG_INSN_P (insn);
+}
+
+template <>
+template <>
+inline bool
is_a_helper <rtx_nonjump_insn *>::test (rtx rt)
{
return NONJUMP_INSN_P (rt);
@@ -843,6 +864,14 @@ is_a_helper <rtx_jump_insn *>::test (rtx rt)
template <>
template <>
inline bool
+is_a_helper <rtx_jump_insn *>::test (rtx_insn *insn)
+{
+ return JUMP_P (insn);
+}
+
+template <>
+template <>
+inline bool
is_a_helper <rtx_call_insn *>::test (rtx rt)
{
return CALL_P (rt);
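
These specializations accept rtx_insn * rather than rtx, so checked
casts can start from an already-typed insn pointer.  For instance
(sketch; bb is a basic_block in scope and handle_jump is hypothetical):

  rtx_insn *insn = BB_END (bb);
  if (is_a <rtx_jump_insn *> (insn))   /* resolves to the overload above */
    handle_jump (as_a <rtx_jump_insn *> (insn));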
@@ -1681,6 +1710,23 @@ inline rtx_insn *JUMP_LABEL_AS_INSN (const rtx_insn *insn)
return safe_as_a <rtx_insn *> (JUMP_LABEL (insn));
}
+/* Methods of rtx_jump_insn. */
+
+inline rtx rtx_jump_insn::jump_label () const
+{
+ return JUMP_LABEL (this);
+}
+
+inline rtx_insn *rtx_jump_insn::jump_target () const
+{
+ return safe_as_a <rtx_insn *> (JUMP_LABEL (this));
+}
+
+inline void rtx_jump_insn::set_jump_target (rtx_insn *target)
+{
+  JUMP_LABEL (this) = target;
+}
+
/* Once basic blocks are found, each CODE_LABEL starts a chain that
goes through all the LABEL_REFs that jump to that label. The chain
eventually winds up at the CODE_LABEL: it is circular. */
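
A short usage sketch tying the accessors together (it mirrors the
bb-reorder and resource.c hunks; new_dest is hypothetical, and
redirect_jump's narrowed signature appears further below):

  rtx_jump_insn *jump = as_a <rtx_jump_insn *> (BB_END (bb));
  if (!ANY_RETURN_P (jump->jump_label ()))   /* a label, not a return */
    redirect_jump (jump, block_label (new_dest), 0);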
@@ -2662,7 +2708,7 @@ extern rtx_insn *emit_debug_insn_before (rtx, rtx);
extern rtx_insn *emit_debug_insn_before_noloc (rtx, rtx);
extern rtx_insn *emit_debug_insn_before_setloc (rtx, rtx, int);
extern rtx_barrier *emit_barrier_before (rtx);
-extern rtx_insn *emit_label_before (rtx, rtx_insn *);
+extern rtx_code_label *emit_label_before (rtx, rtx_insn *);
extern rtx_note *emit_note_before (enum insn_note, rtx);
extern rtx_insn *emit_insn_after (rtx, rtx);
extern rtx_insn *emit_insn_after_noloc (rtx, rtx, basic_block);
@@ -2683,7 +2729,7 @@ extern rtx_insn *emit_insn (rtx);
extern rtx_insn *emit_debug_insn (rtx);
extern rtx_insn *emit_jump_insn (rtx);
extern rtx_insn *emit_call_insn (rtx);
-extern rtx_insn *emit_label (rtx);
+extern rtx_code_label *emit_label (rtx);
extern rtx_jump_table_data *emit_jump_table_data (rtx);
extern rtx_barrier *emit_barrier (void);
extern rtx_note *emit_note (enum insn_note);
@@ -3336,14 +3382,14 @@ extern int eh_returnjump_p (rtx_insn *);
extern int onlyjump_p (const rtx_insn *);
extern int only_sets_cc0_p (const_rtx);
extern int sets_cc0_p (const_rtx);
-extern int invert_jump_1 (rtx_insn *, rtx);
-extern int invert_jump (rtx_insn *, rtx, int);
+extern int invert_jump_1 (rtx_jump_insn *, rtx);
+extern int invert_jump (rtx_jump_insn *, rtx, int);
extern int rtx_renumbered_equal_p (const_rtx, const_rtx);
extern int true_regnum (const_rtx);
extern unsigned int reg_or_subregno (const_rtx);
extern int redirect_jump_1 (rtx, rtx);
-extern void redirect_jump_2 (rtx, rtx, rtx, int, int);
-extern int redirect_jump (rtx, rtx, int);
+extern void redirect_jump_2 (rtx_jump_insn *, rtx, rtx, int, int);
+extern int redirect_jump (rtx_jump_insn *, rtx, int);
extern void rebuild_jump_labels (rtx_insn *);
extern void rebuild_jump_labels_chain (rtx_insn *);
extern rtx reversed_comparison (const_rtx, machine_mode);
@@ -3426,7 +3472,7 @@ extern void print_inline_rtx (FILE *, const_rtx, int);
not be in sched-vis.c but in rtl.c, because they are not only used
by the scheduler anymore but for all "slim" RTL dumping. */
extern void dump_value_slim (FILE *, const_rtx, int);
-extern void dump_insn_slim (FILE *, const_rtx);
+extern void dump_insn_slim (FILE *, const rtx_insn *);
extern void dump_rtl_slim (FILE *, const rtx_insn *, const rtx_insn *,
int, int);
extern void print_value (pretty_printer *, const_rtx, int);
@@ -2914,7 +2914,8 @@ rtx_referenced_p (const_rtx x, const_rtx body)
bool
tablejump_p (const rtx_insn *insn, rtx *labelp, rtx_jump_table_data **tablep)
{
- rtx label, table;
+ rtx label;
+ rtx_insn *table;
if (!JUMP_P (insn))
return false;
@@ -2650,7 +2650,7 @@ sched_analyze_2 (struct deps_desc *deps, rtx x, rtx_insn *insn)
case MEM:
{
/* Reading memory. */
- rtx u;
+ rtx_insn_list *u;
rtx_insn_list *pending;
rtx_expr_list *pending_mem;
rtx t = x;
@@ -2701,11 +2701,10 @@ sched_analyze_2 (struct deps_desc *deps, rtx x, rtx_insn *insn)
pending_mem = pending_mem->next ();
}
- for (u = deps->last_pending_memory_flush; u; u = XEXP (u, 1))
- add_dependence (insn, as_a <rtx_insn *> (XEXP (u, 0)),
- REG_DEP_ANTI);
+ for (u = deps->last_pending_memory_flush; u; u = u->next ())
+ add_dependence (insn, u->insn (), REG_DEP_ANTI);
- for (u = deps->pending_jump_insns; u; u = XEXP (u, 1))
+ for (u = deps->pending_jump_insns; u; u = u->next ())
if (deps_may_trap_p (x))
{
if ((sched_deps_info->generate_spec_deps)
@@ -2714,11 +2713,10 @@ sched_analyze_2 (struct deps_desc *deps, rtx x, rtx_insn *insn)
ds_t ds = set_dep_weak (DEP_ANTI, BEGIN_CONTROL,
MAX_DEP_WEAK);
- note_dep (as_a <rtx_insn *> (XEXP (u, 0)), ds);
+ note_dep (u->insn (), ds);
}
else
- add_dependence (insn, as_a <rtx_insn *> (XEXP (u, 0)),
- REG_DEP_CONTROL);
+ add_dependence (insn, u->insn (), REG_DEP_CONTROL);
}
}
@@ -3089,7 +3087,7 @@ sched_analyze_insn (struct deps_desc *deps, rtx x, rtx_insn *insn)
if (DEBUG_INSN_P (insn))
{
rtx_insn *prev = deps->last_debug_insn;
- rtx u;
+ rtx_insn_list *u;
if (!deps->readonly)
deps->last_debug_insn = insn;
@@ -3101,8 +3099,8 @@ sched_analyze_insn (struct deps_desc *deps, rtx x, rtx_insn *insn)
REG_DEP_ANTI, false);
if (!sel_sched_p ())
- for (u = deps->last_pending_memory_flush; u; u = XEXP (u, 1))
- add_dependence (insn, as_a <rtx_insn *> (XEXP (u, 0)), REG_DEP_ANTI);
+ for (u = deps->last_pending_memory_flush; u; u = u->next ())
+ add_dependence (insn, u->insn (), REG_DEP_ANTI);
EXECUTE_IF_SET_IN_REG_SET (reg_pending_uses, 0, i, rsi)
{
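
The sched-deps hunks above replace raw XEXP access with the
rtx_insn_list accessors: next () is XEXP (node, 1) and insn () is
XEXP (node, 0), already typed as rtx_insn *.  The resulting walk idiom:

  for (rtx_insn_list *u = deps->last_pending_memory_flush;
       u; u = u->next ())
    add_dependence (insn, u->insn (), REG_DEP_ANTI);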
@@ -67,7 +67,7 @@ along with GCC; see the file COPYING3. If not see
pointer, via str_pattern_slim, but this usage is discouraged. */
/* For insns we print patterns, and for some patterns we print insns... */
-static void print_insn_with_notes (pretty_printer *, const_rtx);
+static void print_insn_with_notes (pretty_printer *, const rtx_insn *);
/* This recognizes rtx'en classified as expressions.  These always
represent some action on values or results of other expressions, that
@@ -669,7 +669,7 @@ print_pattern (pretty_printer *pp, const_rtx x, int verbose)
with their INSN_UIDs. */
void
-print_insn (pretty_printer *pp, const_rtx x, int verbose)
+print_insn (pretty_printer *pp, const rtx_insn *x, int verbose)
{
if (verbose)
{
@@ -787,7 +787,7 @@ print_insn (pretty_printer *pp, const_rtx x, int verbose)
note attached to the instruction. */
static void
-print_insn_with_notes (pretty_printer *pp, const_rtx x)
+print_insn_with_notes (pretty_printer *pp, const rtx_insn *x)
{
pp_string (pp, print_rtx_head);
print_insn (pp, x, 1);
@@ -823,7 +823,7 @@ dump_value_slim (FILE *f, const_rtx x, int verbose)
/* Emit a slim dump of X (an insn) to the file F, including any register
note attached to the instruction. */
void
-dump_insn_slim (FILE *f, const_rtx x)
+dump_insn_slim (FILE *f, const rtx_insn *x)
{
pretty_printer rtl_slim_pp;
rtl_slim_pp.buffer->stream = f;
@@ -893,9 +893,9 @@ str_pattern_slim (const_rtx x)
}
/* Emit a slim dump of X (an insn) to stderr. */
-extern void debug_insn_slim (const_rtx);
+extern void debug_insn_slim (const rtx_insn *);
DEBUG_FUNCTION void
-debug_insn_slim (const_rtx x)
+debug_insn_slim (const rtx_insn *x)
{
dump_insn_slim (stderr, x);
}
@@ -135,12 +135,12 @@ static void balance_case_nodes (case_node_ptr *, case_node_ptr);
static int node_has_low_bound (case_node_ptr, tree);
static int node_has_high_bound (case_node_ptr, tree);
static int node_is_bounded (case_node_ptr, tree);
-static void emit_case_nodes (rtx, case_node_ptr, rtx, int, tree);
+static void emit_case_nodes (rtx, case_node_ptr, rtx_code_label *, int, tree);
/* Return the rtx-label that corresponds to a LABEL_DECL,
creating it if necessary. */
-rtx
+rtx_insn *
label_rtx (tree label)
{
gcc_assert (TREE_CODE (label) == LABEL_DECL);
@@ -153,15 +153,15 @@ label_rtx (tree label)
LABEL_PRESERVE_P (r) = 1;
}
- return DECL_RTL (label);
+ return as_a <rtx_insn *> (DECL_RTL (label));
}
/* As above, but also put it on the forced-reference list of the
function that contains it. */
-rtx
+rtx_insn *
force_label_rtx (tree label)
{
- rtx_insn *ref = as_a <rtx_insn *> (label_rtx (label));
+ rtx_insn *ref = label_rtx (label);
tree function = decl_function_context (label);
gcc_assert (function);
@@ -170,6 +170,14 @@ force_label_rtx (tree label)
return ref;
}
+/* As label_rtx, but ensures (in a checking build) that the returned
+   value is an existing label (i.e. an rtx with code CODE_LABEL).  */
+rtx_code_label *
+jump_target_rtx (tree label)
+{
+ return as_a <rtx_code_label *> (label_rtx (label));
+}
+
/* Add an unconditional jump to LABEL as the next sequential instruction. */
void
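
The split above makes the two contracts explicit: label_rtx may
legitimately hand back a deleted-label NOTE, while jump_target_rtx
asserts a live CODE_LABEL.  Sketch of the intended division of labour
(label is a LABEL_DECL):

  rtx_insn *r = label_rtx (label);              /* CODE_LABEL or NOTE */
  rtx_code_label *t = jump_target_rtx (label);  /* asserts CODE_LABEL */
  emit_jump (t);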
@@ -196,7 +204,7 @@ emit_jump (rtx label)
void
expand_label (tree label)
{
- rtx_insn *label_r = as_a <rtx_insn *> (label_rtx (label));
+ rtx_code_label *label_r = jump_target_rtx (label);
do_pending_stack_adjust ();
emit_label (label_r);
@@ -705,7 +713,7 @@ resolve_operand_name_1 (char *p, tree outputs, tree inputs, tree labels)
void
expand_naked_return (void)
{
- rtx end_label;
+ rtx_code_label *end_label;
clear_pending_stack_adjust ();
do_pending_stack_adjust ();
@@ -720,12 +728,12 @@ expand_naked_return (void)
/* Generate code to jump to LABEL if OP0 and OP1 are equal in mode MODE. PROB
is the probability of jumping to LABEL. */
static void
-do_jump_if_equal (machine_mode mode, rtx op0, rtx op1, rtx label,
+do_jump_if_equal (machine_mode mode, rtx op0, rtx op1, rtx_code_label *label,
int unsignedp, int prob)
{
gcc_assert (prob <= REG_BR_PROB_BASE);
do_compare_rtx_and_jump (op0, op1, EQ, unsignedp, mode,
- NULL_RTX, NULL_RTX, label, prob);
+ NULL_RTX, NULL, label, prob);
}
/* Do the insertion of a case label into case_list. The labels are
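
As elsewhere in this patch, the label argument of do_compare_rtx_and_jump
changes from NULL_RTX to NULL because the label operands are now
rtx_code_label *; the remaining NULL_RTX is the size operand, which stays
a plain rtx.  Annotated sketch of the call above:

  do_compare_rtx_and_jump (op0, op1, EQ, unsignedp, mode,
                           NULL_RTX /* size */, NULL /* if_false_label */,
                           label /* if_true_label */, prob);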
@@ -882,8 +890,8 @@ expand_switch_as_decision_tree_p (tree range,
static void
emit_case_decision_tree (tree index_expr, tree index_type,
- struct case_node *case_list, rtx default_label,
- int default_prob)
+ case_node_ptr case_list, rtx_code_label *default_label,
+ int default_prob)
{
rtx index = expand_normal (index_expr);
@@ -1141,7 +1149,7 @@ void
expand_case (gswitch *stmt)
{
tree minval = NULL_TREE, maxval = NULL_TREE, range = NULL_TREE;
- rtx default_label = NULL_RTX;
+ rtx_code_label *default_label = NULL;
unsigned int count, uniq;
int i;
int ncases = gimple_switch_num_labels (stmt);
@@ -1173,7 +1181,7 @@ expand_case (gswitch *stmt)
do_pending_stack_adjust ();
/* Find the default case target label. */
- default_label = label_rtx (CASE_LABEL (gimple_switch_default_label (stmt)));
+  default_label
+    = jump_target_rtx (CASE_LABEL (gimple_switch_default_label (stmt)));
edge default_edge = EDGE_SUCC (bb, 0);
int default_prob = default_edge->probability;
@@ -1323,7 +1331,7 @@ expand_sjlj_dispatch_table (rtx dispatch_index,
for (int i = 0; i < ncases; i++)
{
tree elt = dispatch_table[i];
- rtx lab = label_rtx (CASE_LABEL (elt));
+ rtx_code_label *lab = jump_target_rtx (CASE_LABEL (elt));
do_jump_if_equal (index_mode, index, zero, lab, 0, -1);
force_expand_binop (index_mode, sub_optab,
index, CONST1_RTX (index_mode),
@@ -1592,7 +1600,7 @@ node_is_bounded (case_node_ptr node, tree index_type)
tests for the value 50, then this node need not test anything. */
static void
-emit_case_nodes (rtx index, case_node_ptr node, rtx default_label,
+emit_case_nodes (rtx index, case_node_ptr node, rtx_code_label *default_label,
int default_prob, tree index_type)
{
/* If INDEX has an unsigned type, we must make unsigned branches. */
@@ -1620,7 +1628,8 @@ emit_case_nodes (rtx index, case_node_ptr node, rtx default_label,
convert_modes (mode, imode,
expand_normal (node->low),
unsignedp),
- label_rtx (node->code_label), unsignedp, probability);
+ jump_target_rtx (node->code_label),
+ unsignedp, probability);
/* Since this case is taken at this point, reduce its weight from
subtree_weight. */
subtree_prob -= prob;
@@ -1687,7 +1696,7 @@ emit_case_nodes (rtx index, case_node_ptr node, rtx default_label,
convert_modes (mode, imode,
expand_normal (node->right->low),
unsignedp),
- label_rtx (node->right->code_label),
+ jump_target_rtx (node->right->code_label),
unsignedp, probability);
/* See if the value matches what the left hand side
@@ -1699,7 +1708,7 @@ emit_case_nodes (rtx index, case_node_ptr node, rtx default_label,
convert_modes (mode, imode,
expand_normal (node->left->low),
unsignedp),
- label_rtx (node->left->code_label),
+ jump_target_rtx (node->left->code_label),
unsignedp, probability);
}
@@ -1786,7 +1795,7 @@ emit_case_nodes (rtx index, case_node_ptr node, rtx default_label,
(mode, imode,
expand_normal (node->right->low),
unsignedp),
- label_rtx (node->right->code_label), unsignedp, probability);
+ jump_target_rtx (node->right->code_label), unsignedp, probability);
}
}
@@ -1828,7 +1837,7 @@ emit_case_nodes (rtx index, case_node_ptr node, rtx default_label,
(mode, imode,
expand_normal (node->left->low),
unsignedp),
- label_rtx (node->left->code_label), unsignedp, probability);
+ jump_target_rtx (node->left->code_label), unsignedp, probability);
}
}
}
@@ -2051,7 +2060,7 @@ emit_case_nodes (rtx index, case_node_ptr node, rtx default_label,
mode, 1, default_label, probability);
}
- emit_jump (label_rtx (node->code_label));
+ emit_jump (jump_target_rtx (node->code_label));
}
}
}
@@ -31,13 +31,18 @@ extern tree resolve_asm_operand_names (tree, tree, tree, tree);
extern tree tree_overlaps_hard_reg_set (tree, HARD_REG_SET *);
#endif
-/* Return the CODE_LABEL rtx for a LABEL_DECL, creating it if necessary. */
-extern rtx label_rtx (tree);
+/* Return the CODE_LABEL rtx for a LABEL_DECL, creating it if necessary.
+   If the label was deleted, the corresponding note insn
+   (NOTE_INSN_DELETED_LABEL or NOTE_INSN_DELETED_DEBUG_LABEL)
+   is returned.  */
+extern rtx_insn *label_rtx (tree);
/* As label_rtx, but additionally the label is placed on the forced label
list of its containing function (i.e. it is treated as reachable even
if how is not obvious). */
-extern rtx force_label_rtx (tree);
+extern rtx_insn *force_label_rtx (tree);
+
+/* As label_rtx, but checks that the label was not deleted.  */
+extern rtx_code_label *jump_target_rtx (tree);
/* Expand a GIMPLE_SWITCH statement. */
extern void expand_case (gswitch *);
@@ -813,7 +813,7 @@ insert_store (struct st_expr * expr, edge e)
return 0;
reg = expr->reaching_reg;
- insn = as_a <rtx_insn *> (gen_move_insn (copy_rtx (expr->pattern), reg));
+ insn = gen_move_insn (copy_rtx (expr->pattern), reg);
/* If we are inserting this expression on ALL predecessor edges of a BB,
insert it at the start of the BB, and reset the insert bits on the other
@@ -954,7 +954,7 @@ replace_store_insn (rtx reg, rtx_insn *del, basic_block bb,
rtx mem, note, set, ptr;
mem = smexpr->pattern;
- insn = as_a <rtx_insn *> (gen_move_insn (reg, SET_SRC (single_set (del))));
+ insn = gen_move_insn (reg, SET_SRC (single_set (del)));
for (ptr = smexpr->antic_stores; ptr; ptr = XEXP (ptr, 1))
if (XEXP (ptr, 0) == del)