@@ -1280,8 +1280,7 @@ fp_prec_to_size (int prec)
{
machine_mode mode;
- for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
- mode = GET_MODE_WIDER_MODE (mode))
+ FOR_EACH_MODE_IN_CLASS (mode, MODE_FLOAT)
if (GET_MODE_PRECISION (mode) == prec)
return GET_MODE_BITSIZE (mode);
@@ -1295,8 +1294,7 @@ fp_size_to_prec (int size)
{
machine_mode mode;
- for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
- mode = GET_MODE_WIDER_MODE (mode))
+ FOR_EACH_MODE_IN_CLASS (mode, MODE_FLOAT)
if (GET_MODE_BITSIZE (mode) == size)
return GET_MODE_PRECISION (mode);
@@ -2766,7 +2766,7 @@ expand_builtin_strlen (tree exp, rtx target,
tree src = CALL_EXPR_ARG (exp, 0);
rtx src_reg;
rtx_insn *before_strlen;
- machine_mode insn_mode = target_mode;
+ machine_mode insn_mode;
enum insn_code icode = CODE_FOR_nothing;
unsigned int align;
@@ -2794,13 +2794,11 @@ expand_builtin_strlen (tree exp, rtx target,
return NULL_RTX;
/* Bail out if we can't compute strlen in the right mode. */
- while (insn_mode != VOIDmode)
+ FOR_EACH_MODE_FROM (insn_mode, target_mode)
{
icode = optab_handler (strlen_optab, insn_mode);
if (icode != CODE_FOR_nothing)
break;
-
- insn_mode = GET_MODE_WIDER_MODE (insn_mode);
}
if (insn_mode == VOIDmode)
return NULL_RTX;
@@ -2119,13 +2119,14 @@ tree
c_common_fixed_point_type_for_size (unsigned int ibit, unsigned int fbit,
int unsignedp, int satp)
{
- machine_mode mode;
+ enum mode_class mclass;
if (ibit == 0)
- mode = unsignedp ? UQQmode : QQmode;
+ mclass = unsignedp ? MODE_UFRACT : MODE_FRACT;
else
- mode = unsignedp ? UHAmode : HAmode;
+ mclass = unsignedp ? MODE_UACCUM : MODE_ACCUM;
- for (; mode != VOIDmode; mode = GET_MODE_WIDER_MODE (mode))
+ machine_mode mode;
+ FOR_EACH_MODE_IN_CLASS (mode, mclass)
if (GET_MODE_IBIT (mode) >= ibit && GET_MODE_FBIT (mode) >= fbit)
break;
@@ -1184,9 +1184,8 @@ c_cpp_builtins (cpp_reader *pfile)
if (flag_building_libgcc)
{
/* Properties of floating-point modes for libgcc2.c. */
- for (machine_mode mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
- mode != VOIDmode;
- mode = GET_MODE_WIDER_MODE (mode))
+ machine_mode mode;
+ FOR_EACH_MODE_IN_CLASS (mode, MODE_FLOAT)
{
const char *name = GET_MODE_NAME (mode);
char *macro_name
@@ -11792,9 +11792,7 @@ simplify_comparison (enum rtx_code code, rtx *pop0, rtx *pop1)
}
else if (c0 == c1)
- for (tmode = GET_CLASS_NARROWEST_MODE
- (GET_MODE_CLASS (GET_MODE (op0)));
- tmode != GET_MODE (op0); tmode = GET_MODE_WIDER_MODE (tmode))
+ FOR_EACH_MODE_UNTIL (tmode, GET_MODE (op0))
if ((unsigned HOST_WIDE_INT) c0 == GET_MODE_MASK (tmode))
{
op0 = gen_lowpart_or_truncate (tmode, inner_op0);
@@ -12667,75 +12665,81 @@ simplify_comparison (enum rtx_code code, rtx *pop0, rtx *pop1)
if (mode != VOIDmode && GET_MODE_CLASS (mode) == MODE_INT
&& GET_MODE_SIZE (mode) < UNITS_PER_WORD
&& ! have_insn_for (COMPARE, mode))
- for (tmode = GET_MODE_WIDER_MODE (mode);
- (tmode != VOIDmode && HWI_COMPUTABLE_MODE_P (tmode));
- tmode = GET_MODE_WIDER_MODE (tmode))
- if (have_insn_for (COMPARE, tmode))
- {
- int zero_extended;
-
- /* If this is a test for negative, we can make an explicit
- test of the sign bit. Test this first so we can use
- a paradoxical subreg to extend OP0. */
+ FOR_EACH_WIDER_MODE (tmode, mode)
+ {
+ if (!HWI_COMPUTABLE_MODE_P (tmode))
+ break;
+ if (have_insn_for (COMPARE, tmode))
+ {
+ int zero_extended;
- if (op1 == const0_rtx && (code == LT || code == GE)
- && HWI_COMPUTABLE_MODE_P (mode))
- {
- unsigned HOST_WIDE_INT sign
- = HOST_WIDE_INT_1U << (GET_MODE_BITSIZE (mode) - 1);
- op0 = simplify_gen_binary (AND, tmode,
- gen_lowpart (tmode, op0),
- gen_int_mode (sign, tmode));
- code = (code == LT) ? NE : EQ;
- break;
- }
+ /* If this is a test for negative, we can make an explicit
+ test of the sign bit. Test this first so we can use
+ a paradoxical subreg to extend OP0. */
- /* If the only nonzero bits in OP0 and OP1 are those in the
- narrower mode and this is an equality or unsigned comparison,
- we can use the wider mode. Similarly for sign-extended
- values, in which case it is true for all comparisons. */
- zero_extended = ((code == EQ || code == NE
- || code == GEU || code == GTU
- || code == LEU || code == LTU)
- && (nonzero_bits (op0, tmode)
- & ~GET_MODE_MASK (mode)) == 0
- && ((CONST_INT_P (op1)
- || (nonzero_bits (op1, tmode)
- & ~GET_MODE_MASK (mode)) == 0)));
-
- if (zero_extended
- || ((num_sign_bit_copies (op0, tmode)
- > (unsigned int) (GET_MODE_PRECISION (tmode)
- - GET_MODE_PRECISION (mode)))
- && (num_sign_bit_copies (op1, tmode)
- > (unsigned int) (GET_MODE_PRECISION (tmode)
- - GET_MODE_PRECISION (mode)))))
- {
- /* If OP0 is an AND and we don't have an AND in MODE either,
- make a new AND in the proper mode. */
- if (GET_CODE (op0) == AND
- && !have_insn_for (AND, mode))
+ if (op1 == const0_rtx && (code == LT || code == GE)
+ && HWI_COMPUTABLE_MODE_P (mode))
+ {
+ unsigned HOST_WIDE_INT sign
+ = HOST_WIDE_INT_1U << (GET_MODE_BITSIZE (mode) - 1);
op0 = simplify_gen_binary (AND, tmode,
- gen_lowpart (tmode,
- XEXP (op0, 0)),
- gen_lowpart (tmode,
- XEXP (op0, 1)));
- else
- {
- if (zero_extended)
- {
- op0 = simplify_gen_unary (ZERO_EXTEND, tmode, op0, mode);
- op1 = simplify_gen_unary (ZERO_EXTEND, tmode, op1, mode);
- }
- else
- {
- op0 = simplify_gen_unary (SIGN_EXTEND, tmode, op0, mode);
- op1 = simplify_gen_unary (SIGN_EXTEND, tmode, op1, mode);
- }
- break;
- }
- }
- }
+ gen_lowpart (tmode, op0),
+ gen_int_mode (sign, tmode));
+ code = (code == LT) ? NE : EQ;
+ break;
+ }
+
+ /* If the only nonzero bits in OP0 and OP1 are those in the
+ narrower mode and this is an equality or unsigned comparison,
+ we can use the wider mode. Similarly for sign-extended
+ values, in which case it is true for all comparisons. */
+ zero_extended = ((code == EQ || code == NE
+ || code == GEU || code == GTU
+ || code == LEU || code == LTU)
+ && (nonzero_bits (op0, tmode)
+ & ~GET_MODE_MASK (mode)) == 0
+ && ((CONST_INT_P (op1)
+ || (nonzero_bits (op1, tmode)
+ & ~GET_MODE_MASK (mode)) == 0)));
+
+ if (zero_extended
+ || ((num_sign_bit_copies (op0, tmode)
+ > (unsigned int) (GET_MODE_PRECISION (tmode)
+ - GET_MODE_PRECISION (mode)))
+ && (num_sign_bit_copies (op1, tmode)
+ > (unsigned int) (GET_MODE_PRECISION (tmode)
+ - GET_MODE_PRECISION (mode)))))
+ {
+ /* If OP0 is an AND and we don't have an AND in MODE either,
+ make a new AND in the proper mode. */
+ if (GET_CODE (op0) == AND
+ && !have_insn_for (AND, mode))
+ op0 = simplify_gen_binary (AND, tmode,
+ gen_lowpart (tmode,
+ XEXP (op0, 0)),
+ gen_lowpart (tmode,
+ XEXP (op0, 1)));
+ else
+ {
+ if (zero_extended)
+ {
+ op0 = simplify_gen_unary (ZERO_EXTEND, tmode,
+ op0, mode);
+ op1 = simplify_gen_unary (ZERO_EXTEND, tmode,
+ op1, mode);
+ }
+ else
+ {
+ op0 = simplify_gen_unary (SIGN_EXTEND, tmode,
+ op0, mode);
+ op1 = simplify_gen_unary (SIGN_EXTEND, tmode,
+ op1, mode);
+ }
+ break;
+ }
+ }
+ }
+ }
/* We may have changed the comparison operands. Re-canonicalize. */
if (swap_commutative_operands_p (op0, op1))
@@ -8610,7 +8610,7 @@ type_natural_mode (const_tree type, const CUMULATIVE_ARGS *cum,
mode = MIN_MODE_VECTOR_INT;
/* Get the mode which has this inner mode and number of units. */
- for (; mode != VOIDmode; mode = GET_MODE_WIDER_MODE (mode))
+ FOR_EACH_MODE_FROM (mode, mode)
if (GET_MODE_NUNITS (mode) == TYPE_VECTOR_SUBPARTS (type)
&& GET_MODE_INNER (mode) == innermode)
{
@@ -4845,12 +4845,11 @@ cse_insn (rtx_insn *insn)
{
machine_mode wider_mode;
- for (wider_mode = GET_MODE_WIDER_MODE (mode);
- wider_mode != VOIDmode
- && GET_MODE_PRECISION (wider_mode) <= BITS_PER_WORD
- && src_related == 0;
- wider_mode = GET_MODE_WIDER_MODE (wider_mode))
+ FOR_EACH_WIDER_MODE (wider_mode, mode)
{
+ if (GET_MODE_PRECISION (wider_mode) > BITS_PER_WORD)
+ break;
+
struct table_elt *const_elt
= lookup (src_const, HASH (src_const, wider_mode), wider_mode);
@@ -4864,6 +4863,9 @@ cse_insn (rtx_insn *insn)
src_related = gen_lowpart (mode, const_elt->exp);
break;
}
+
+ if (src_related != 0)
+ break;
}
}
@@ -4880,10 +4882,11 @@ cse_insn (rtx_insn *insn)
machine_mode tmode;
rtx new_and = gen_rtx_AND (VOIDmode, NULL_RTX, XEXP (src, 1));
- for (tmode = GET_MODE_WIDER_MODE (mode);
- GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
- tmode = GET_MODE_WIDER_MODE (tmode))
+ FOR_EACH_WIDER_MODE (tmode, mode)
{
+ if (GET_MODE_SIZE (tmode) > UNITS_PER_WORD)
+ break;
+
rtx inner = gen_lowpart (tmode, XEXP (src, 0));
struct table_elt *larger_elt;
@@ -4930,12 +4933,13 @@ cse_insn (rtx_insn *insn)
PUT_CODE (memory_extend_rtx, extend_op);
XEXP (memory_extend_rtx, 0) = src;
- for (tmode = GET_MODE_WIDER_MODE (mode);
- GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
- tmode = GET_MODE_WIDER_MODE (tmode))
+ FOR_EACH_WIDER_MODE (tmode, mode)
{
struct table_elt *larger_elt;
+ if (GET_MODE_SIZE (tmode) > UNITS_PER_WORD)
+ break;
+
PUT_MODE (memory_extend_rtx, tmode);
larger_elt = lookup (memory_extend_rtx,
HASH (memory_extend_rtx, tmode), tmode);
@@ -1583,15 +1583,17 @@ find_shift_sequence (int access_size,
justify the value we want to read but is available in one insn on
the machine. */
- for (new_mode = smallest_mode_for_size (access_size * BITS_PER_UNIT,
- MODE_INT);
- GET_MODE_BITSIZE (new_mode) <= BITS_PER_WORD;
- new_mode = GET_MODE_WIDER_MODE (new_mode))
+ FOR_EACH_MODE_FROM (new_mode,
+ smallest_mode_for_size (access_size * BITS_PER_UNIT,
+ MODE_INT))
{
rtx target, new_reg, new_lhs;
rtx_insn *shift_seq, *insn;
int cost;
+ if (GET_MODE_BITSIZE (new_mode) > BITS_PER_WORD)
+ break;
+
/* If a constant was stored into memory, try to simplify it here,
otherwise the cost of the shift might preclude this optimization
e.g. at -Os, even when no actual shift will be needed. */
@@ -5842,9 +5842,8 @@ init_derived_machine_modes (void)
byte_mode = VOIDmode;
word_mode = VOIDmode;
- for (machine_mode mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
- mode != VOIDmode;
- mode = GET_MODE_WIDER_MODE (mode))
+ machine_mode mode;
+ FOR_EACH_MODE_IN_CLASS (mode, MODE_INT)
{
if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT
&& byte_mode == VOIDmode)
@@ -5926,23 +5925,17 @@ init_emit_once (void)
const REAL_VALUE_TYPE *const r =
(i == 0 ? &dconst0 : i == 1 ? &dconst1 : &dconst2);
- for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
- mode != VOIDmode;
- mode = GET_MODE_WIDER_MODE (mode))
+ FOR_EACH_MODE_IN_CLASS (mode, MODE_FLOAT)
const_tiny_rtx[i][(int) mode] =
const_double_from_real_value (*r, mode);
- for (mode = GET_CLASS_NARROWEST_MODE (MODE_DECIMAL_FLOAT);
- mode != VOIDmode;
- mode = GET_MODE_WIDER_MODE (mode))
+ FOR_EACH_MODE_IN_CLASS (mode, MODE_DECIMAL_FLOAT)
const_tiny_rtx[i][(int) mode] =
const_double_from_real_value (*r, mode);
const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);
- for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
- mode != VOIDmode;
- mode = GET_MODE_WIDER_MODE (mode))
+ FOR_EACH_MODE_IN_CLASS (mode, MODE_INT)
const_tiny_rtx[i][(int) mode] = GEN_INT (i);
for (mode = MIN_MODE_PARTIAL_INT;
@@ -5953,52 +5946,40 @@ init_emit_once (void)
const_tiny_rtx[3][(int) VOIDmode] = constm1_rtx;
- for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
- mode != VOIDmode;
- mode = GET_MODE_WIDER_MODE (mode))
+ FOR_EACH_MODE_IN_CLASS (mode, MODE_INT)
const_tiny_rtx[3][(int) mode] = constm1_rtx;
for (mode = MIN_MODE_PARTIAL_INT;
mode <= MAX_MODE_PARTIAL_INT;
mode = (machine_mode_enum) ((int) mode + 1))
const_tiny_rtx[3][(int) mode] = constm1_rtx;
-
- for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_INT);
- mode != VOIDmode;
- mode = GET_MODE_WIDER_MODE (mode))
+
+ FOR_EACH_MODE_IN_CLASS (mode, MODE_COMPLEX_INT)
{
rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
}
- for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_FLOAT);
- mode != VOIDmode;
- mode = GET_MODE_WIDER_MODE (mode))
+ FOR_EACH_MODE_IN_CLASS (mode, MODE_COMPLEX_FLOAT)
{
rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
}
- for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
- mode != VOIDmode;
- mode = GET_MODE_WIDER_MODE (mode))
+ FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_INT)
{
const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
const_tiny_rtx[3][(int) mode] = gen_const_vector (mode, 3);
}
- for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
- mode != VOIDmode;
- mode = GET_MODE_WIDER_MODE (mode))
+ FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_FLOAT)
{
const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
}
- for (mode = GET_CLASS_NARROWEST_MODE (MODE_FRACT);
- mode != VOIDmode;
- mode = GET_MODE_WIDER_MODE (mode))
+ FOR_EACH_MODE_IN_CLASS (mode, MODE_FRACT)
{
FCONST0 (mode).data.high = 0;
FCONST0 (mode).data.low = 0;
@@ -6007,9 +5988,7 @@ init_emit_once (void)
FCONST0 (mode), mode);
}
- for (mode = GET_CLASS_NARROWEST_MODE (MODE_UFRACT);
- mode != VOIDmode;
- mode = GET_MODE_WIDER_MODE (mode))
+ FOR_EACH_MODE_IN_CLASS (mode, MODE_UFRACT)
{
FCONST0 (mode).data.high = 0;
FCONST0 (mode).data.low = 0;
@@ -6018,9 +5997,7 @@ init_emit_once (void)
FCONST0 (mode), mode);
}
- for (mode = GET_CLASS_NARROWEST_MODE (MODE_ACCUM);
- mode != VOIDmode;
- mode = GET_MODE_WIDER_MODE (mode))
+ FOR_EACH_MODE_IN_CLASS (mode, MODE_ACCUM)
{
FCONST0 (mode).data.high = 0;
FCONST0 (mode).data.low = 0;
@@ -6040,9 +6017,7 @@ init_emit_once (void)
FCONST1 (mode), mode);
}
- for (mode = GET_CLASS_NARROWEST_MODE (MODE_UACCUM);
- mode != VOIDmode;
- mode = GET_MODE_WIDER_MODE (mode))
+ FOR_EACH_MODE_IN_CLASS (mode, MODE_UACCUM)
{
FCONST0 (mode).data.high = 0;
FCONST0 (mode).data.low = 0;
@@ -6062,31 +6037,23 @@ init_emit_once (void)
FCONST1 (mode), mode);
}
- for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FRACT);
- mode != VOIDmode;
- mode = GET_MODE_WIDER_MODE (mode))
+ FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_FRACT)
{
const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
}
- for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_UFRACT);
- mode != VOIDmode;
- mode = GET_MODE_WIDER_MODE (mode))
+ FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_UFRACT)
{
const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
}
- for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_ACCUM);
- mode != VOIDmode;
- mode = GET_MODE_WIDER_MODE (mode))
+ FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_ACCUM)
{
const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
}
- for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_UACCUM);
- mode != VOIDmode;
- mode = GET_MODE_WIDER_MODE (mode))
+ FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_UACCUM)
{
const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
@@ -6100,9 +6067,7 @@ init_emit_once (void)
if (STORE_FLAG_VALUE == 1)
const_tiny_rtx[1][(int) BImode] = const1_rtx;
- for (mode = GET_CLASS_NARROWEST_MODE (MODE_POINTER_BOUNDS);
- mode != VOIDmode;
- mode = GET_MODE_WIDER_MODE (mode))
+ FOR_EACH_MODE_IN_CLASS (mode, MODE_POINTER_BOUNDS)
{
wide_int wi_zero = wi::zero (GET_MODE_PRECISION (mode));
const_tiny_rtx[0][mode] = immed_wide_int_const (wi_zero, mode);
@@ -1917,9 +1917,7 @@ hard_function_value (const_tree valtype, const_tree func, const_tree fntype,
since the value of bytes will then be large enough that no
mode will match anyway. */
- for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
- tmpmode != VOIDmode;
- tmpmode = GET_MODE_WIDER_MODE (tmpmode))
+ FOR_EACH_MODE_IN_CLASS (tmpmode, MODE_INT)
{
/* Have we found a large enough mode? */
if (GET_MODE_SIZE (tmpmode) >= bytes)
@@ -204,8 +204,7 @@ init_expmed_one_mode (struct init_expmed_rtl *all,
if (SCALAR_INT_MODE_P (mode))
{
- for (mode_from = MIN_MODE_INT; mode_from <= MAX_MODE_INT;
- mode_from = (machine_mode_enum) (mode_from + 1))
+ FOR_EACH_MODE_IN_CLASS (mode_from, MODE_INT)
init_expmed_one_conv (all, mode, mode_from, speed);
}
if (GET_MODE_CLASS (mode) == MODE_INT)
@@ -1580,7 +1579,7 @@ extract_bit_field_1 (rtx str_rtx, unsigned HOST_WIDE_INT bitsize,
else
new_mode = MIN_MODE_VECTOR_INT;
- for (; new_mode != VOIDmode ; new_mode = GET_MODE_WIDER_MODE (new_mode))
+ FOR_EACH_MODE_FROM (new_mode, new_mode)
if (GET_MODE_SIZE (new_mode) == GET_MODE_SIZE (GET_MODE (op0))
&& GET_MODE_UNIT_SIZE (new_mode) == GET_MODE_SIZE (tmode)
&& targetm.vector_mode_supported_p (new_mode))
@@ -2019,8 +2018,7 @@ extract_fixed_bit_field_1 (machine_mode tmode, rtx op0,
/* Find the narrowest integer mode that contains the field. */
- for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
- mode = GET_MODE_WIDER_MODE (mode))
+ FOR_EACH_MODE_IN_CLASS (mode, MODE_INT)
if (GET_MODE_BITSIZE (mode) >= bitsize + bitnum)
{
op0 = convert_to_mode (mode, op0, 0);
@@ -4084,15 +4082,13 @@ expand_divmod (int rem_flag, enum tree_code code, machine_mode mode,
optab2 = (op1_is_pow2 ? optab1
: (unsignedp ? udivmod_optab : sdivmod_optab));
- for (compute_mode = mode; compute_mode != VOIDmode;
- compute_mode = GET_MODE_WIDER_MODE (compute_mode))
+ FOR_EACH_MODE_FROM (compute_mode, mode)
if (optab_handler (optab1, compute_mode) != CODE_FOR_nothing
|| optab_handler (optab2, compute_mode) != CODE_FOR_nothing)
break;
if (compute_mode == VOIDmode)
- for (compute_mode = mode; compute_mode != VOIDmode;
- compute_mode = GET_MODE_WIDER_MODE (compute_mode))
+ FOR_EACH_MODE_FROM (compute_mode, mode)
if (optab_libfunc (optab1, compute_mode)
|| optab_libfunc (optab2, compute_mode))
break;
@@ -5494,8 +5490,7 @@ emit_store_flag_1 (rtx target, enum rtx_code code, rtx op0, rtx op1,
}
mclass = GET_MODE_CLASS (mode);
- for (compare_mode = mode; compare_mode != VOIDmode;
- compare_mode = GET_MODE_WIDER_MODE (compare_mode))
+ FOR_EACH_MODE_FROM (compare_mode, mode)
{
machine_mode optab_mode = mclass == MODE_CC ? CCmode : compare_mode;
icode = optab_handler (cstore_optab, optab_mode);
@@ -175,12 +175,10 @@ init_expr_target (void)
mem = gen_rtx_MEM (VOIDmode, gen_raw_REG (Pmode, LAST_VIRTUAL_REGISTER + 1));
- for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
- mode = GET_MODE_WIDER_MODE (mode))
+ FOR_EACH_MODE_IN_CLASS (mode, MODE_FLOAT)
{
machine_mode srcmode;
- for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
- srcmode = GET_MODE_WIDER_MODE (srcmode))
+ FOR_EACH_MODE_UNTIL (srcmode, mode)
{
enum insn_code ic;
@@ -547,8 +545,7 @@ convert_move (rtx to, rtx from, int unsignedp)
int shift_amount;
/* Search for a mode to convert via. */
- for (intermediate = from_mode; intermediate != VOIDmode;
- intermediate = GET_MODE_WIDER_MODE (intermediate))
+ FOR_EACH_MODE_FROM (intermediate, from_mode)
if (((can_extend_p (to_mode, intermediate, unsignedp)
!= CODE_FOR_nothing)
|| (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
@@ -700,12 +697,14 @@ alignment_for_piecewise_move (unsigned int max_pieces, unsigned int align)
{
machine_mode tmode, xmode;
- for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
- tmode != VOIDmode;
- xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
- if (GET_MODE_SIZE (tmode) > max_pieces
- || SLOW_UNALIGNED_ACCESS (tmode, align))
- break;
+ xmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
+ FOR_EACH_MODE_IN_CLASS (tmode, MODE_INT)
+ {
+ if (GET_MODE_SIZE (tmode) > max_pieces
+ || SLOW_UNALIGNED_ACCESS (tmode, align))
+ break;
+ xmode = tmode;
+ }
align = MAX (align, GET_MODE_ALIGNMENT (xmode));
}
@@ -721,8 +720,7 @@ widest_int_mode_for_size (unsigned int size)
{
machine_mode tmode, mode = VOIDmode;
- for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
- tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
+ FOR_EACH_MODE_IN_CLASS (tmode, MODE_INT)
if (GET_MODE_SIZE (tmode) < size)
mode = tmode;
@@ -1725,8 +1723,7 @@ emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align,
including more than one in the machine description unless
the more limited one has some advantage. */
- for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
- mode = GET_MODE_WIDER_MODE (mode))
+ FOR_EACH_MODE_IN_CLASS (mode, MODE_INT)
{
enum insn_code code = direct_optab_handler (movmem_optab, mode);
@@ -2783,9 +2780,7 @@ copy_blkmode_to_reg (machine_mode mode, tree src)
{
/* Find the smallest integer mode large enough to hold the
entire structure. */
- for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
- mode != VOIDmode;
- mode = GET_MODE_WIDER_MODE (mode))
+ FOR_EACH_MODE_IN_CLASS (mode, MODE_INT)
/* Have we found a large enough mode? */
if (GET_MODE_SIZE (mode) >= bytes)
break;
@@ -3041,8 +3036,7 @@ set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align,
expected_size = min_size;
}
- for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
- mode = GET_MODE_WIDER_MODE (mode))
+ FOR_EACH_MODE_IN_CLASS (mode, MODE_INT)
{
enum insn_code code = direct_optab_handler (setmem_optab, mode);
@@ -3781,9 +3775,7 @@ compress_float_constant (rtx x, rtx y)
else
oldcost = set_src_cost (force_const_mem (dstmode, y), dstmode, speed);
- for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
- srcmode != orig_srcmode;
- srcmode = GET_MODE_WIDER_MODE (srcmode))
+ FOR_EACH_MODE_UNTIL (srcmode, orig_srcmode)
{
enum insn_code ic;
rtx trunc_y;
@@ -192,10 +192,19 @@ public:
ALWAYS_INLINE machine_mode (const T &m) : m_mode (m) {}
ALWAYS_INLINE operator machine_mode_enum () const { return m_mode; }
+ static ALWAYS_INLINE bool includes_p (machine_mode_enum) { return true; }
+ static machine_mode from_int (int i);
+
protected:
machine_mode_enum m_mode;
};
+ALWAYS_INLINE machine_mode
+machine_mode::from_int (int i)
+{
+ return (machine_mode_enum) i;
+}
+
/* Return the base GET_MODE_SIZE value for MODE. */
ALWAYS_INLINE unsigned short
@@ -411,6 +420,15 @@ extern const unsigned char class_narrowest_mode[MAX_MODE_CLASS];
#define GET_CLASS_NARROWEST_MODE(CLASS) \
(machine_mode ((machine_mode_enum) class_narrowest_mode[CLASS]))
+/* Return the narrowest mode in T's class. */
+
+template<typename T>
+inline T
+get_narrowest_mode (T mode)
+{
+ return T::from_int (class_narrowest_mode[GET_MODE_CLASS (mode)]);
+}
+
/* Define the integer modes whose sizes are BITS_PER_UNIT and BITS_PER_WORD
and the mode whose class is Pmode and whose size is POINTER_SIZE. */
@@ -441,4 +459,95 @@ struct int_n_data_t {
extern bool int_n_enabled_p[NUM_INT_N_ENTS];
extern const int_n_data_t int_n_data[NUM_INT_N_ENTS];
+namespace mode_iterator
+{
+ /* Start mode iterator *ITER at the first mode in class MCLASS, if any. */
+
+ inline void
+ start (machine_mode *iter, enum mode_class mclass)
+ {
+ *iter = GET_CLASS_NARROWEST_MODE (mclass);
+ }
+
+ /* Return true if mode iterator *ITER has not reached the end. */
+
+ inline bool
+ iterate_p (machine_mode *iter)
+ {
+ return *iter != E_VOIDmode;
+ }
+
+ /* Set mode iterator *ITER to the next widest mode in the same class,
+ if any. */
+
+ inline void
+ get_wider (machine_mode *iter)
+ {
+ *iter = GET_MODE_WIDER_MODE (*iter);
+ }
+
+ /* Set mode iterator *ITER to the next widest mode in the same class.
+ Such a mode is known to exist. */
+
+ inline void
+ get_known_wider (machine_mode *iter)
+ {
+ *iter = GET_MODE_WIDER_MODE (*iter);
+ gcc_checking_assert (*iter != VOIDmode);
+ }
+
+ /* Set mode iterator *ITER to the mode that is two times wider than the
+ current one, if such a mode exists. */
+
+ inline void
+ get_2xwider (machine_mode *iter)
+ {
+ *iter = GET_MODE_2XWIDER_MODE (*iter);
+ }
+}
+
+/* Make ITERATOR iterate over all the modes in mode class CLASS,
+ from narrowest to widest. */
+#define FOR_EACH_MODE_IN_CLASS(ITERATOR, CLASS) \
+ for (mode_iterator::start (&(ITERATOR), CLASS); \
+ mode_iterator::iterate_p (&(ITERATOR)); \
+ mode_iterator::get_wider (&(ITERATOR)))
+
+/* Make ITERATOR iterate over all the modes in the range [START, END),
+ in order of increasing width. */
+#define FOR_EACH_MODE(ITERATOR, START, END) \
+ for ((ITERATOR) = (START); \
+ (ITERATOR) != (END); \
+ mode_iterator::get_known_wider (&(ITERATOR)))
+
+/* Make ITERATOR iterate over START and all wider modes in the same
+ class, in order of increasing width. */
+#define FOR_EACH_MODE_FROM(ITERATOR, START) \
+ for ((ITERATOR) = (START); \
+ mode_iterator::iterate_p (&(ITERATOR)); \
+ mode_iterator::get_wider (&(ITERATOR)))
+
+/* Make ITERATOR iterate over modes in the range [NARROWEST, END)
+ in order of increasing width, where NARROWEST is the narrowest mode
+ in END's class. */
+#define FOR_EACH_MODE_UNTIL(ITERATOR, END) \
+ FOR_EACH_MODE (ITERATOR, get_narrowest_mode (END), END)
+
+/* Make ITERATOR iterate over all modes in the same class as START, in
+   order of increasing width.  Start at the first mode wider than START,
+   or don't iterate at all if there is no wider mode. */
+#define FOR_EACH_WIDER_MODE(ITERATOR, START) \
+ for ((ITERATOR) = (START), mode_iterator::get_wider (&(ITERATOR)); \
+ mode_iterator::iterate_p (&(ITERATOR)); \
+ mode_iterator::get_wider (&(ITERATOR)))
+
+/* Make ITERATOR iterate over all modes in the same class as START, in
+   order of increasing width, and with each mode being twice the width
+   of the previous mode.  Start at the mode that is two times wider than
+   START, or don't iterate at all if there is no such mode. */
+#define FOR_EACH_2XWIDER_MODE(ITERATOR, START) \
+ for ((ITERATOR) = (START), mode_iterator::get_2xwider (&(ITERATOR)); \
+ mode_iterator::iterate_p (&(ITERATOR)); \
+ mode_iterator::get_2xwider (&(ITERATOR)))
+
#endif /* not HAVE_MACHINE_MODES */
@@ -4307,9 +4307,7 @@ omp_clause_aligned_alignment (tree clause)
static enum mode_class classes[]
= { MODE_INT, MODE_VECTOR_INT, MODE_FLOAT, MODE_VECTOR_FLOAT };
for (int i = 0; i < 4; i += 2)
- for (mode = GET_CLASS_NARROWEST_MODE (classes[i]);
- mode != VOIDmode;
- mode = GET_MODE_WIDER_MODE (mode))
+ FOR_EACH_MODE_IN_CLASS (mode, classes[i])
{
vmode = targetm.vectorize.preferred_simd_mode (mode);
if (GET_MODE_CLASS (vmode) != classes[i + 1])
@@ -194,21 +194,20 @@ get_best_extraction_insn (extraction_insn *insn,
machine_mode field_mode)
{
machine_mode mode = smallest_mode_for_size (struct_bits, MODE_INT);
- while (mode != VOIDmode)
+ FOR_EACH_MODE_FROM (mode, mode)
{
if (get_extraction_insn (insn, pattern, type, mode))
{
- while (mode != VOIDmode
- && GET_MODE_SIZE (mode) <= GET_MODE_SIZE (field_mode)
- && !TRULY_NOOP_TRUNCATION_MODES_P (insn->field_mode,
- field_mode))
+ FOR_EACH_MODE_FROM (mode, mode)
{
+ if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (field_mode)
+ || TRULY_NOOP_TRUNCATION_MODES_P (insn->field_mode,
+ field_mode))
+ break;
get_extraction_insn (insn, pattern, type, mode);
- mode = GET_MODE_WIDER_MODE (mode);
}
return true;
}
- mode = GET_MODE_WIDER_MODE (mode);
}
return false;
}
@@ -1249,9 +1249,7 @@ expand_binop (machine_mode mode, optab binoptab, rtx op0, rtx op1,
if (CLASS_HAS_WIDER_MODES_P (mclass)
&& methods != OPTAB_DIRECT && methods != OPTAB_LIB)
- for (wider_mode = GET_MODE_WIDER_MODE (mode);
- wider_mode != VOIDmode;
- wider_mode = GET_MODE_WIDER_MODE (wider_mode))
+ FOR_EACH_WIDER_MODE (wider_mode, mode)
{
if (optab_handler (binoptab, wider_mode) != CODE_FOR_nothing
|| (binoptab == smul_optab
@@ -1792,9 +1790,7 @@ expand_binop (machine_mode mode, optab binoptab, rtx op0, rtx op1,
if (CLASS_HAS_WIDER_MODES_P (mclass))
{
- for (wider_mode = GET_MODE_WIDER_MODE (mode);
- wider_mode != VOIDmode;
- wider_mode = GET_MODE_WIDER_MODE (wider_mode))
+ FOR_EACH_WIDER_MODE (wider_mode, mode)
{
if (find_widening_optab_handler (binoptab, wider_mode, mode, 1)
!= CODE_FOR_nothing
@@ -1950,9 +1946,7 @@ expand_twoval_unop (optab unoptab, rtx op0, rtx targ0, rtx targ1,
if (CLASS_HAS_WIDER_MODES_P (mclass))
{
- for (wider_mode = GET_MODE_WIDER_MODE (mode);
- wider_mode != VOIDmode;
- wider_mode = GET_MODE_WIDER_MODE (wider_mode))
+ FOR_EACH_WIDER_MODE (wider_mode, mode)
{
if (optab_handler (unoptab, wider_mode) != CODE_FOR_nothing)
{
@@ -2033,9 +2027,7 @@ expand_twoval_binop (optab binoptab, rtx op0, rtx op1, rtx targ0, rtx targ1,
if (CLASS_HAS_WIDER_MODES_P (mclass))
{
- for (wider_mode = GET_MODE_WIDER_MODE (mode);
- wider_mode != VOIDmode;
- wider_mode = GET_MODE_WIDER_MODE (wider_mode))
+ FOR_EACH_WIDER_MODE (wider_mode, mode)
{
if (optab_handler (binoptab, wider_mode) != CODE_FOR_nothing)
{
@@ -2137,9 +2129,7 @@ widen_leading (machine_mode mode, rtx op0, rtx target, optab unoptab)
if (CLASS_HAS_WIDER_MODES_P (mclass))
{
machine_mode wider_mode;
- for (wider_mode = GET_MODE_WIDER_MODE (mode);
- wider_mode != VOIDmode;
- wider_mode = GET_MODE_WIDER_MODE (wider_mode))
+ FOR_EACH_WIDER_MODE (wider_mode, mode)
{
if (optab_handler (unoptab, wider_mode) != CODE_FOR_nothing)
{
@@ -2309,9 +2299,7 @@ widen_bswap (machine_mode mode, rtx op0, rtx target)
if (!CLASS_HAS_WIDER_MODES_P (mclass))
return NULL_RTX;
- for (wider_mode = GET_MODE_WIDER_MODE (mode);
- wider_mode != VOIDmode;
- wider_mode = GET_MODE_WIDER_MODE (wider_mode))
+ FOR_EACH_WIDER_MODE (wider_mode, mode)
if (optab_handler (bswap_optab, wider_mode) != CODE_FOR_nothing)
goto found;
return NULL_RTX;
@@ -2373,8 +2361,7 @@ expand_parity (machine_mode mode, rtx op0, rtx target)
if (CLASS_HAS_WIDER_MODES_P (mclass))
{
machine_mode wider_mode;
- for (wider_mode = mode; wider_mode != VOIDmode;
- wider_mode = GET_MODE_WIDER_MODE (wider_mode))
+ FOR_EACH_MODE_FROM (wider_mode, mode)
{
if (optab_handler (popcount_optab, wider_mode) != CODE_FOR_nothing)
{
@@ -2826,9 +2813,7 @@ expand_unop (machine_mode mode, optab unoptab, rtx op0, rtx target,
}
if (CLASS_HAS_WIDER_MODES_P (mclass))
- for (wider_mode = GET_MODE_WIDER_MODE (mode);
- wider_mode != VOIDmode;
- wider_mode = GET_MODE_WIDER_MODE (wider_mode))
+ FOR_EACH_WIDER_MODE (wider_mode, mode)
{
if (optab_handler (unoptab, wider_mode) != CODE_FOR_nothing)
{
@@ -2995,9 +2980,7 @@ expand_unop (machine_mode mode, optab unoptab, rtx op0, rtx target,
if (CLASS_HAS_WIDER_MODES_P (mclass))
{
- for (wider_mode = GET_MODE_WIDER_MODE (mode);
- wider_mode != VOIDmode;
- wider_mode = GET_MODE_WIDER_MODE (wider_mode))
+ FOR_EACH_WIDER_MODE (wider_mode, mode)
{
if (optab_handler (unoptab, wider_mode) != CODE_FOR_nothing
|| optab_libfunc (unoptab, wider_mode))
@@ -3797,9 +3780,7 @@ prepare_cmp_insn (rtx x, rtx y, enum rtx_code comparison, rtx size,
/* Try to use a memory block compare insn - either cmpstr
or cmpmem will do. */
- for (cmp_mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
- cmp_mode != VOIDmode;
- cmp_mode = GET_MODE_WIDER_MODE (cmp_mode))
+ FOR_EACH_MODE_IN_CLASS (cmp_mode, MODE_INT)
{
cmp_code = direct_optab_handler (cmpmem_optab, cmp_mode);
if (cmp_code == CODE_FOR_nothing)
@@ -3861,9 +3842,8 @@ prepare_cmp_insn (rtx x, rtx y, enum rtx_code comparison, rtx size,
mclass = GET_MODE_CLASS (mode);
test = gen_rtx_fmt_ee (comparison, VOIDmode, x, y);
- cmp_mode = mode;
- do
- {
+ FOR_EACH_MODE_FROM (cmp_mode, mode)
+ {
enum insn_code icode;
icode = optab_handler (cbranch_optab, cmp_mode);
if (icode != CODE_FOR_nothing
@@ -3887,9 +3867,7 @@ prepare_cmp_insn (rtx x, rtx y, enum rtx_code comparison, rtx size,
if (methods == OPTAB_DIRECT || !CLASS_HAS_WIDER_MODES_P (mclass))
break;
- cmp_mode = GET_MODE_WIDER_MODE (cmp_mode);
}
- while (cmp_mode != VOIDmode);
if (methods != OPTAB_LIB_WIDEN)
goto fail;
@@ -4071,9 +4049,7 @@ prepare_float_lib_cmp (rtx x, rtx y, enum rtx_code comparison,
bool reversed_p = false;
cmp_mode = targetm.libgcc_cmp_return_mode ();
- for (mode = orig_mode;
- mode != VOIDmode;
- mode = GET_MODE_WIDER_MODE (mode))
+ FOR_EACH_MODE_FROM (mode, orig_mode)
{
if (code_to_optab (comparison)
&& (libfunc = optab_libfunc (code_to_optab (comparison), mode)))
@@ -4626,10 +4602,8 @@ expand_float (rtx to, rtx from, int unsignedp)
wider mode. If the integer mode is wider than the mode of FROM,
we can do the conversion signed even if the input is unsigned. */
- for (fmode = GET_MODE (to); fmode != VOIDmode;
- fmode = GET_MODE_WIDER_MODE (fmode))
- for (imode = GET_MODE (from); imode != VOIDmode;
- imode = GET_MODE_WIDER_MODE (imode))
+ FOR_EACH_MODE_FROM (fmode, GET_MODE (to))
+ FOR_EACH_MODE_FROM (imode, GET_MODE (from))
{
int doing_unsigned = unsignedp;
@@ -4676,8 +4650,7 @@ expand_float (rtx to, rtx from, int unsignedp)
least as wide as the target. Using FMODE will avoid rounding woes
with unsigned values greater than the signed maximum value. */
- for (fmode = GET_MODE (to); fmode != VOIDmode;
- fmode = GET_MODE_WIDER_MODE (fmode))
+ FOR_EACH_MODE_FROM (fmode, GET_MODE (to))
if (GET_MODE_PRECISION (GET_MODE (from)) < GET_MODE_BITSIZE (fmode)
&& can_float_p (fmode, GET_MODE (from), 0) != CODE_FOR_nothing)
break;
@@ -4824,10 +4797,8 @@ expand_fix (rtx to, rtx from, int unsignedp)
this conversion. If the integer mode is wider than the mode of TO,
we can do the conversion either signed or unsigned. */
- for (fmode = GET_MODE (from); fmode != VOIDmode;
- fmode = GET_MODE_WIDER_MODE (fmode))
- for (imode = GET_MODE (to); imode != VOIDmode;
- imode = GET_MODE_WIDER_MODE (imode))
+ FOR_EACH_MODE_FROM (fmode, GET_MODE (from))
+ FOR_EACH_MODE_FROM (imode, GET_MODE (to))
{
int doing_unsigned = unsignedp;
@@ -4887,8 +4858,7 @@ expand_fix (rtx to, rtx from, int unsignedp)
simply clears out that bit. The rest is trivial. */
if (unsignedp && GET_MODE_PRECISION (GET_MODE (to)) <= HOST_BITS_PER_WIDE_INT)
- for (fmode = GET_MODE (from); fmode != VOIDmode;
- fmode = GET_MODE_WIDER_MODE (fmode))
+ FOR_EACH_MODE_FROM (fmode, GET_MODE (from))
if (CODE_FOR_nothing != can_fix_p (GET_MODE (to), fmode, 0, &must_trunc)
&& (!DECIMAL_FLOAT_MODE_P (fmode)
|| GET_MODE_BITSIZE (fmode) > GET_MODE_PRECISION (GET_MODE (to))))
@@ -5089,10 +5059,8 @@ expand_sfix_optab (rtx to, rtx from, convert_optab tab)
this conversion. If the integer mode is wider than the mode of TO,
we can do the conversion either signed or unsigned. */
- for (fmode = GET_MODE (from); fmode != VOIDmode;
- fmode = GET_MODE_WIDER_MODE (fmode))
- for (imode = GET_MODE (to); imode != VOIDmode;
- imode = GET_MODE_WIDER_MODE (imode))
+ FOR_EACH_MODE_FROM (fmode, GET_MODE (from))
+ FOR_EACH_MODE_FROM (imode, GET_MODE (to))
{
icode = convert_optab_handler (tab, imode, fmode);
if (icode != CODE_FOR_nothing)
@@ -1765,10 +1765,7 @@ move2add_use_add2_insn (rtx reg, rtx sym, rtx off, rtx_insn *insn)
else if (sym == NULL_RTX && GET_MODE (reg) != BImode)
{
machine_mode narrow_mode;
- for (narrow_mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
- narrow_mode != VOIDmode
- && narrow_mode != GET_MODE (reg);
- narrow_mode = GET_MODE_WIDER_MODE (narrow_mode))
+ FOR_EACH_MODE_UNTIL (narrow_mode, GET_MODE (reg))
{
if (have_insn_for (STRICT_LOW_PART, narrow_mode)
&& ((reg_offset[regno] & ~GET_MODE_MASK (narrow_mode))
@@ -3237,13 +3237,9 @@ reg_to_stack (void)
for (i = FIRST_STACK_REG; i <= LAST_STACK_REG; i++)
{
machine_mode mode;
- for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
- mode != VOIDmode;
- mode = GET_MODE_WIDER_MODE (mode))
+ FOR_EACH_MODE_IN_CLASS (mode, MODE_FLOAT)
FP_MODE_REG (i, mode) = gen_rtx_REG (mode, i);
- for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_FLOAT);
- mode != VOIDmode;
- mode = GET_MODE_WIDER_MODE (mode))
+ FOR_EACH_MODE_IN_CLASS (mode, MODE_COMPLEX_FLOAT)
FP_MODE_REG (i, mode) = gen_rtx_REG (mode, i);
}
@@ -622,36 +622,28 @@ choose_hard_reg_mode (unsigned int regno ATTRIBUTE_UNUSED,
held in REGNO. If none, we look for the largest floating-point mode.
If we still didn't find a valid mode, try CCmode. */
- for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
- mode != VOIDmode;
- mode = GET_MODE_WIDER_MODE (mode))
+ FOR_EACH_MODE_IN_CLASS (mode, MODE_INT)
if ((unsigned) hard_regno_nregs[regno][mode] == nregs
&& HARD_REGNO_MODE_OK (regno, mode)
&& (! call_saved || ! HARD_REGNO_CALL_PART_CLOBBERED (regno, mode))
&& GET_MODE_SIZE (mode) > GET_MODE_SIZE (found_mode))
found_mode = mode;
- for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
- mode != VOIDmode;
- mode = GET_MODE_WIDER_MODE (mode))
+ FOR_EACH_MODE_IN_CLASS (mode, MODE_FLOAT)
if ((unsigned) hard_regno_nregs[regno][mode] == nregs
&& HARD_REGNO_MODE_OK (regno, mode)
&& (! call_saved || ! HARD_REGNO_CALL_PART_CLOBBERED (regno, mode))
&& GET_MODE_SIZE (mode) > GET_MODE_SIZE (found_mode))
found_mode = mode;
- for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
- mode != VOIDmode;
- mode = GET_MODE_WIDER_MODE (mode))
+ FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_FLOAT)
if ((unsigned) hard_regno_nregs[regno][mode] == nregs
&& HARD_REGNO_MODE_OK (regno, mode)
&& (! call_saved || ! HARD_REGNO_CALL_PART_CLOBBERED (regno, mode))
&& GET_MODE_SIZE (mode) > GET_MODE_SIZE (found_mode))
found_mode = mode;
- for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
- mode != VOIDmode;
- mode = GET_MODE_WIDER_MODE (mode))
+ FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_INT)
if ((unsigned) hard_regno_nregs[regno][mode] == nregs
&& HARD_REGNO_MODE_OK (regno, mode)
&& (! call_saved || ! HARD_REGNO_CALL_PART_CLOBBERED (regno, mode))
@@ -5605,10 +5605,8 @@ init_num_sign_bit_copies_in_rep (void)
{
machine_mode mode, in_mode;
- for (in_mode = GET_CLASS_NARROWEST_MODE (MODE_INT); in_mode != VOIDmode;
- in_mode = GET_MODE_WIDER_MODE (mode))
- for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != in_mode;
- mode = GET_MODE_WIDER_MODE (mode))
+ FOR_EACH_MODE_IN_CLASS (in_mode, MODE_INT)
+ FOR_EACH_MODE_UNTIL (mode, in_mode)
{
machine_mode i;
@@ -5619,7 +5617,7 @@ init_num_sign_bit_copies_in_rep (void)
/* We are in in_mode. Count how many bits outside of mode
have to be copies of the sign-bit. */
- for (i = mode; i != in_mode; i = GET_MODE_WIDER_MODE (i))
+ FOR_EACH_MODE (i, mode, in_mode)
{
machine_mode wider = GET_MODE_WIDER_MODE (i);
@@ -866,8 +866,7 @@ emit_case_decision_tree (tree index_expr, tree index_type,
{
int unsignedp = TYPE_UNSIGNED (index_type);
machine_mode wider_mode;
- for (wider_mode = GET_MODE (index); wider_mode != VOIDmode;
- wider_mode = GET_MODE_WIDER_MODE (wider_mode))
+ FOR_EACH_MODE_FROM (wider_mode, GET_MODE (index))
if (have_insn_for (COMPARE, wider_mode))
{
index = convert_to_mode (wider_mode, index, unsignedp);
@@ -306,8 +306,7 @@ mode_for_size (unsigned int size, enum mode_class mclass, int limit)
return BLKmode;
/* Get the first mode which has this size, in the specified class. */
- for (mode = GET_CLASS_NARROWEST_MODE (mclass); mode != VOIDmode;
- mode = GET_MODE_WIDER_MODE (mode))
+ FOR_EACH_MODE_IN_CLASS (mode, mclass)
if (GET_MODE_PRECISION (mode) == size)
return mode;
@@ -348,8 +347,7 @@ smallest_mode_for_size (unsigned int size, enum mode_class mclass)
/* Get the first mode which has at least this size, in the
specified class. */
- for (mode = GET_CLASS_NARROWEST_MODE (mclass); mode != VOIDmode;
- mode = GET_MODE_WIDER_MODE (mode))
+ FOR_EACH_MODE_IN_CLASS (mode, mclass)
if (GET_MODE_PRECISION (mode) >= size)
break;
@@ -501,7 +499,7 @@ mode_for_vector (machine_mode innermode, unsigned nunits)
/* Do not check vector_mode_supported_p here. We'll do that
later in vector_type_mode. */
- for (; mode != VOIDmode ; mode = GET_MODE_WIDER_MODE (mode))
+ FOR_EACH_MODE_FROM (mode, mode)
if (GET_MODE_NUNITS (mode) == nunits
&& GET_MODE_INNER (mode) == innermode)
break;
@@ -1885,8 +1883,7 @@ finish_bitfield_representative (tree repr, tree field)
gcc_assert (maxbitsize % BITS_PER_UNIT == 0);
/* Find the smallest nice mode to use. */
- for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
- mode = GET_MODE_WIDER_MODE (mode))
+ FOR_EACH_MODE_IN_CLASS (mode, MODE_INT)
if (GET_MODE_BITSIZE (mode) >= bitsize)
break;
if (mode != VOIDmode
@@ -3824,9 +3824,8 @@ target_supports_divmod_p (optab divmod_optab, optab div_optab, machine_mode mode
{
/* If optab_handler exists for div_optab, perhaps in a wider mode,
we don't want to use the libfunc even if it exists for given mode. */
- for (machine_mode div_mode = mode;
- div_mode != VOIDmode;
- div_mode = GET_MODE_WIDER_MODE (div_mode))
+ machine_mode div_mode;
+ FOR_EACH_MODE_FROM (div_mode, mode)
if (optab_handler (div_optab, div_mode) != CODE_FOR_nothing)
return false;
@@ -1114,7 +1114,7 @@ type_for_widest_vector_mode (tree type, optab op)
else
mode = MIN_MODE_VECTOR_INT;
- for (; mode != VOIDmode; mode = GET_MODE_WIDER_MODE (mode))
+ FOR_EACH_MODE_FROM (mode, mode)
if (GET_MODE_INNER (mode) == inner_mode
&& GET_MODE_NUNITS (mode) > best_nunits
&& optab_handler (op, mode) != CODE_FOR_nothing)
@@ -4213,12 +4213,12 @@ vectorizable_conversion (gimple *stmt, gimple_stmt_iterator *gsi,
<= GET_MODE_SIZE (TYPE_MODE (rhs_type))))
goto unsupported;
- rhs_mode = TYPE_MODE (rhs_type);
fltsz = GET_MODE_SIZE (TYPE_MODE (lhs_type));
- for (rhs_mode = GET_MODE_2XWIDER_MODE (TYPE_MODE (rhs_type));
- rhs_mode != VOIDmode && GET_MODE_SIZE (rhs_mode) <= fltsz;
- rhs_mode = GET_MODE_2XWIDER_MODE (rhs_mode))
+ FOR_EACH_2XWIDER_MODE (rhs_mode, TYPE_MODE (rhs_type))
{
+ if (GET_MODE_SIZE (rhs_mode) > fltsz)
+ break;
+
cvt_type
= build_nonstandard_integer_type (GET_MODE_BITSIZE (rhs_mode), 0);
cvt_type = get_same_sized_vectype (cvt_type, vectype_in);
@@ -6305,13 +6305,14 @@ prepare_call_arguments (basic_block bb, rtx_insn *insn)
else if (GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
|| GET_MODE_CLASS (GET_MODE (x)) == MODE_PARTIAL_INT)
{
- machine_mode mode = GET_MODE (x);
+ machine_mode mode;
- while ((mode = GET_MODE_WIDER_MODE (mode)) != VOIDmode
- && GET_MODE_BITSIZE (mode) <= BITS_PER_WORD)
+ FOR_EACH_WIDER_MODE (mode, GET_MODE (x))
{
- rtx reg = simplify_subreg (mode, x, GET_MODE (x), 0);
+ if (GET_MODE_BITSIZE (mode) > BITS_PER_WORD)
+ break;
+ rtx reg = simplify_subreg (mode, x, GET_MODE (x), 0);
if (reg == NULL_RTX || !REG_P (reg))
continue;
val = cselib_lookup (reg, mode, 0, VOIDmode);