@@ -459,7 +459,7 @@ adjust_bit_field_mem_for_reg (enum extraction_pattern pattern,
bit_field_mode_iterator iter (bitsize, bitnum, bitregion_start,
bitregion_end, MEM_ALIGN (op0),
MEM_VOLATILE_P (op0));
- machine_mode best_mode;
+ scalar_int_mode best_mode;
if (iter.next_mode (&best_mode))
{
/* We can use a memory in BEST_MODE. See whether this is true for
@@ -477,7 +477,7 @@ adjust_bit_field_mem_for_reg (enum extraction_pattern pattern,
fieldmode))
limit_mode = insn.field_mode;
- machine_mode wider_mode;
+ scalar_int_mode wider_mode;
while (iter.next_mode (&wider_mode)
&& GET_MODE_SIZE (wider_mode) <= GET_MODE_SIZE (limit_mode))
best_mode = wider_mode;
@@ -1086,7 +1086,8 @@ store_bit_field (rtx str_rtx, unsigned HOST_WIDE_INT bitsize,
bit region. */
if (MEM_P (str_rtx) && bitregion_start > 0)
{
- machine_mode bestmode;
+ scalar_int_mode best_mode;
+ machine_mode addr_mode = VOIDmode;
HOST_WIDE_INT offset, size;
gcc_assert ((bitregion_start % BITS_PER_UNIT) == 0);
@@ -1096,11 +1097,13 @@ store_bit_field (rtx str_rtx, unsigned HOST_WIDE_INT bitsize,
size = (bitnum + bitsize + BITS_PER_UNIT - 1) / BITS_PER_UNIT;
bitregion_end -= bitregion_start;
bitregion_start = 0;
- bestmode = get_best_mode (bitsize, bitnum,
- bitregion_start, bitregion_end,
- MEM_ALIGN (str_rtx), VOIDmode,
- MEM_VOLATILE_P (str_rtx));
- str_rtx = adjust_bitfield_address_size (str_rtx, bestmode, offset, size);
+ if (get_best_mode (bitsize, bitnum,
+ bitregion_start, bitregion_end,
+ MEM_ALIGN (str_rtx), 0,
+ MEM_VOLATILE_P (str_rtx), &best_mode))
+ addr_mode = best_mode;
+ str_rtx = adjust_bitfield_address_size (str_rtx, addr_mode,
+ offset, size);
}
if (!store_bit_field_1 (str_rtx, bitsize, bitnum,
@@ -1134,10 +1137,10 @@ store_fixed_bit_field (rtx op0, unsigned HOST_WIDE_INT bitsize,
if (GET_MODE_BITSIZE (mode) == 0
|| GET_MODE_BITSIZE (mode) > GET_MODE_BITSIZE (word_mode))
mode = word_mode;
- mode = get_best_mode (bitsize, bitnum, bitregion_start, bitregion_end,
- MEM_ALIGN (op0), mode, MEM_VOLATILE_P (op0));
-
- if (mode == VOIDmode)
+ scalar_int_mode best_mode;
+ if (!get_best_mode (bitsize, bitnum, bitregion_start, bitregion_end,
+ MEM_ALIGN (op0), GET_MODE_BITSIZE (mode),
+ MEM_VOLATILE_P (op0), &best_mode))
{
/* The only way this should occur is if the field spans word
boundaries. */
@@ -1146,7 +1149,7 @@ store_fixed_bit_field (rtx op0, unsigned HOST_WIDE_INT bitsize,
return;
}
- op0 = narrow_bit_field_mem (op0, mode, bitsize, bitnum, &bitnum);
+ op0 = narrow_bit_field_mem (op0, best_mode, bitsize, bitnum, &bitnum);
}
store_fixed_bit_field_1 (op0, bitsize, bitnum, value, reverse);
@@ -1932,11 +1935,9 @@ extract_fixed_bit_field (machine_mode tmode, rtx op0,
{
if (MEM_P (op0))
{
- machine_mode mode
- = get_best_mode (bitsize, bitnum, 0, 0, MEM_ALIGN (op0), word_mode,
- MEM_VOLATILE_P (op0));
-
- if (mode == VOIDmode)
+ scalar_int_mode mode;
+ if (!get_best_mode (bitsize, bitnum, 0, 0, MEM_ALIGN (op0),
+ BITS_PER_WORD, MEM_VOLATILE_P (op0), &mode))
/* The only way this should occur is if the field spans word
boundaries. */
return extract_split_bit_field (op0, bitsize, bitnum, unsignedp,
@@ -4675,13 +4675,14 @@ optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize,
unsigned HOST_WIDE_INT offset1;
if (str_bitsize == 0 || str_bitsize > BITS_PER_WORD)
- str_mode = word_mode;
- str_mode = get_best_mode (bitsize, bitpos,
- bitregion_start, bitregion_end,
- MEM_ALIGN (str_rtx), str_mode, 0);
- if (str_mode == VOIDmode)
+ str_bitsize = BITS_PER_WORD;
+
+ scalar_int_mode best_mode;
+ if (!get_best_mode (bitsize, bitpos, bitregion_start, bitregion_end,
+ MEM_ALIGN (str_rtx), str_bitsize, false, &best_mode))
return false;
- str_bitsize = GET_MODE_BITSIZE (str_mode);
+ str_mode = best_mode;
+ str_bitsize = GET_MODE_BITSIZE (best_mode);
offset1 = bitpos;
bitpos %= str_bitsize;
@@ -3857,7 +3857,8 @@ optimize_bit_field_compare (location_t loc, enum tree_code code,
tree type = TREE_TYPE (lhs);
tree unsigned_type;
int const_p = TREE_CODE (rhs) == INTEGER_CST;
- machine_mode lmode, rmode, nmode;
+ machine_mode lmode, rmode;
+ scalar_int_mode nmode;
int lunsignedp, runsignedp;
int lreversep, rreversep;
int lvolatilep = 0, rvolatilep = 0;
@@ -3904,12 +3905,11 @@ optimize_bit_field_compare (location_t loc, enum tree_code code,
/* See if we can find a mode to refer to this field. We should be able to,
but fail if we can't. */
- nmode = get_best_mode (lbitsize, lbitpos, bitstart, bitend,
- const_p ? TYPE_ALIGN (TREE_TYPE (linner))
- : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
- TYPE_ALIGN (TREE_TYPE (rinner))),
- word_mode, false);
- if (nmode == VOIDmode)
+ if (!get_best_mode (lbitsize, lbitpos, bitstart, bitend,
+ const_p ? TYPE_ALIGN (TREE_TYPE (linner))
+ : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
+ TYPE_ALIGN (TREE_TYPE (rinner))),
+ BITS_PER_WORD, false, &nmode))
return 0;
/* Set signed and unsigned types of the precision of this mode for the
@@ -5467,7 +5467,7 @@ fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
int ll_reversep, lr_reversep, rl_reversep, rr_reversep;
machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
- machine_mode lnmode, rnmode;
+ scalar_int_mode lnmode, rnmode;
tree ll_mask, lr_mask, rl_mask, rr_mask;
tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
tree l_const, r_const;
@@ -5653,10 +5653,9 @@ fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
to be relative to a field of that size. */
first_bit = MIN (ll_bitpos, rl_bitpos);
end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
- lnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
- TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
- volatilep);
- if (lnmode == VOIDmode)
+ if (!get_best_mode (end_bit - first_bit, first_bit, 0, 0,
+ TYPE_ALIGN (TREE_TYPE (ll_inner)), BITS_PER_WORD,
+ volatilep, &lnmode))
return 0;
lnbitsize = GET_MODE_BITSIZE (lnmode);
@@ -5718,10 +5717,9 @@ fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
first_bit = MIN (lr_bitpos, rr_bitpos);
end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
- rnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
- TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
- volatilep);
- if (rnmode == VOIDmode)
+ if (!get_best_mode (end_bit - first_bit, first_bit, 0, 0,
+ TYPE_ALIGN (TREE_TYPE (lr_inner)), BITS_PER_WORD,
+ volatilep, &rnmode))
return 0;
rnbitsize = GET_MODE_BITSIZE (rnmode);
@@ -618,11 +618,11 @@ public:
bit_field_mode_iterator (HOST_WIDE_INT, HOST_WIDE_INT,
HOST_WIDE_INT, HOST_WIDE_INT,
unsigned int, bool);
- bool next_mode (machine_mode *);
+ bool next_mode (scalar_int_mode *);
bool prefer_smaller_modes ();
private:
- machine_mode m_mode;
+ opt_scalar_int_mode m_mode;
/* We use signed values here because the bit position can be negative
for invalid input such as gcc.dg/pr48335-8.c. */
HOST_WIDE_INT m_bitsize;
@@ -636,11 +636,9 @@ private:
/* Find the best mode to use to access a bit field. */
-extern machine_mode get_best_mode (int, int,
- unsigned HOST_WIDE_INT,
- unsigned HOST_WIDE_INT,
- unsigned int,
- machine_mode, bool);
+extern bool get_best_mode (int, int, unsigned HOST_WIDE_INT,
+ unsigned HOST_WIDE_INT, unsigned int,
+ unsigned HOST_WIDE_INT, bool, scalar_int_mode *);
/* Determine alignment, 1<=result<=BIGGEST_ALIGNMENT. */
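
For context, the declaration change above swaps a sentinel-returning interface (a machine_mode that callers compare against VOIDmode) for a bool result plus a typed scalar_int_mode out-parameter. The standalone sketch below is not GCC code: Mode, find_best_width_old and find_best_width are made-up stand-ins used only to contrast the two calling conventions, mirroring the "if (!get_best_mode (..., &best_mode))" pattern used throughout the patch.

#include <cstdio>

enum class Mode { VOID, QI, HI, SI, DI };

/* Old-style interface: a sentinel value signals failure, so every
   caller must remember to compare against Mode::VOID.  */
static Mode
find_best_width_old (unsigned bitsize)
{
  if (bitsize <= 8) return Mode::QI;
  if (bitsize <= 16) return Mode::HI;
  if (bitsize <= 32) return Mode::SI;
  if (bitsize <= 64) return Mode::DI;
  return Mode::VOID;
}

/* New-style interface: success is the return value and the result is
   written to a typed out-parameter, as get_best_mode now does with
   scalar_int_mode *best_mode.  */
static bool
find_best_width (unsigned bitsize, Mode *best)
{
  if (bitsize > 64)
    return false;
  *best = (bitsize <= 8 ? Mode::QI
	   : bitsize <= 16 ? Mode::HI
	   : bitsize <= 32 ? Mode::SI : Mode::DI);
  return true;
}

int
main ()
{
  /* Failure path: sentinel comparison vs. boolean result.  */
  if (find_best_width_old (96) == Mode::VOID)
    std::printf ("old: no mode for 96 bits\n");

  Mode best;
  if (!find_best_width (96, &best))
    std::printf ("new: no mode for 96 bits\n");

  /* Success path mirrors `if (get_best_mode (..., &best_mode))`.  */
  if (find_best_width (40, &best))
    std::printf ("new: 40 bits fit in mode %d\n", (int) best);
  return 0;
}
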
@@ -2712,15 +2712,15 @@ bit_field_mode_iterator
available, storing it in *OUT_MODE if so.  */
bool
-bit_field_mode_iterator::next_mode (machine_mode *out_mode)
+bit_field_mode_iterator::next_mode (scalar_int_mode *out_mode)
{
- for (; m_mode != VOIDmode;
- m_mode = GET_MODE_WIDER_MODE (m_mode).else_void ())
+ for (; m_mode.exists (); m_mode = GET_MODE_WIDER_MODE (*m_mode))
{
- unsigned int unit = GET_MODE_BITSIZE (m_mode);
+ scalar_int_mode mode = *m_mode;
+ unsigned int unit = GET_MODE_BITSIZE (mode);
/* Skip modes that don't have full precision. */
- if (unit != GET_MODE_PRECISION (m_mode))
+ if (unit != GET_MODE_PRECISION (mode))
continue;
/* Stop if the mode is too wide to handle efficiently. */
@@ -2747,12 +2747,12 @@ bit_field_mode_iterator::next_mode (machine_mode *out_mode)
break;
/* Stop if the mode requires too much alignment. */
- if (GET_MODE_ALIGNMENT (m_mode) > m_align
- && SLOW_UNALIGNED_ACCESS (m_mode, m_align))
+ if (GET_MODE_ALIGNMENT (mode) > m_align
+ && SLOW_UNALIGNED_ACCESS (mode, m_align))
break;
- *out_mode = m_mode;
- m_mode = GET_MODE_WIDER_MODE (m_mode).else_void ();
+ *out_mode = mode;
+ m_mode = GET_MODE_WIDER_MODE (mode);
m_count++;
return true;
}
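
The loop above now keeps its iteration state in an optional-style wrapper (opt_scalar_int_mode) instead of using VOIDmode as the end marker: the condition tests exists () and the body dereferences the wrapper to get a concrete scalar_int_mode. A rough standalone analogy, not GCC code, using std::optional; IntMode and next_wider are made-up stand-ins for scalar_int_mode and GET_MODE_WIDER_MODE.

#include <cstdio>
#include <optional>

enum class IntMode { QI, HI, SI, DI };

/* Made-up analogue of GET_MODE_WIDER_MODE: empty when there is no
   wider mode left, instead of returning a VOIDmode sentinel.  */
static std::optional<IntMode>
next_wider (IntMode m)
{
  switch (m)
    {
    case IntMode::QI: return IntMode::HI;
    case IntMode::HI: return IntMode::SI;
    case IntMode::SI: return IntMode::DI;
    default: return std::nullopt;
    }
}

int
main ()
{
  /* Same shape as next_mode's loop: test the wrapper for emptiness,
     dereference it to work on the underlying mode, then step wider.  */
  for (std::optional<IntMode> it = IntMode::QI; it.has_value ();
       it = next_wider (*it))
    std::printf ("considering mode %d\n", (int) *it);
  return 0;
}
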
@@ -2780,8 +2780,8 @@ bit_field_mode_iterator::prefer_smaller_modes ()
any adjacent non bit-fields.
The underlying object is known to be aligned to a boundary of ALIGN bits.
- If LARGEST_MODE is not VOIDmode, it means that we should not use a mode
- larger than LARGEST_MODE (usually SImode).
+ If LARGEST_MODE_BITSIZE is not 0, it means that we should not use a mode
+ larger than LARGEST_MODE_BITSIZE bits.
- If no mode meets all these conditions, we return VOIDmode.
+ If no mode meets all these conditions, we return false; otherwise we
+ return true and store the chosen mode in *BEST_MODE.
@@ -2795,17 +2795,18 @@ bit_field_mode_iterator::prefer_smaller_modes ()
If VOLATILEP is true the narrow_volatile_bitfields target hook is used to
decide which of the above modes should be used. */
-machine_mode
+bool
get_best_mode (int bitsize, int bitpos,
unsigned HOST_WIDE_INT bitregion_start,
unsigned HOST_WIDE_INT bitregion_end,
unsigned int align,
- machine_mode largest_mode, bool volatilep)
+ unsigned HOST_WIDE_INT largest_mode_bitsize, bool volatilep,
+ scalar_int_mode *best_mode)
{
bit_field_mode_iterator iter (bitsize, bitpos, bitregion_start,
bitregion_end, align, volatilep);
- machine_mode widest_mode = VOIDmode;
- machine_mode mode;
+ scalar_int_mode mode;
+ bool found = false;
while (iter.next_mode (&mode)
/* ??? For historical reasons, reject modes that would normally
receive greater alignment, even if unaligned accesses are
@@ -2864,14 +2865,16 @@ get_best_mode (int bitsize, int bitpos,
so that the final bitfield reference still has a MEM_EXPR
and MEM_OFFSET. */
&& GET_MODE_ALIGNMENT (mode) <= align
- && (largest_mode == VOIDmode
- || GET_MODE_SIZE (mode) <= GET_MODE_SIZE (largest_mode)))
+ && (largest_mode_bitsize == 0
+ || GET_MODE_BITSIZE (mode) <= largest_mode_bitsize))
{
- widest_mode = mode;
+ *best_mode = mode;
+ found = true;
if (iter.prefer_smaller_modes ())
break;
}
- return widest_mode;
+
+ return found;
}
/* Gets minimal and maximal values for MODE (signed or unsigned depending on
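
The rewritten body of get_best_mode records each acceptable candidate directly into *best_mode and tracks success with a local "found" flag, rather than accumulating a widest_mode that callers then compare against VOIDmode. A self-contained sketch of that loop shape follows; it is not GCC code, and candidate_fits plus the 8-64 bit widths are made-up stand-ins for the real legality checks and mode iterator.

#include <cstdio>

/* Made-up acceptance test standing in for the alignment/volatility
   checks in get_best_mode.  A LARGEST_BITSIZE of 0 means "no limit",
   matching the largest_mode_bitsize convention above.  */
static bool
candidate_fits (unsigned width, unsigned bitsize, unsigned largest_bitsize)
{
  return bitsize <= width
	 && (largest_bitsize == 0 || width <= largest_bitsize);
}

static bool
best_width (unsigned bitsize, unsigned largest_bitsize,
	    bool prefer_smaller, unsigned *best)
{
  bool found = false;
  for (unsigned width = 8; width <= 64; width *= 2)
    if (candidate_fits (width, bitsize, largest_bitsize))
      {
	/* Record the fit immediately; keep looking for a wider one
	   unless smaller modes are preferred.  */
	*best = width;
	found = true;
	if (prefer_smaller)
	  break;
      }
  return found;
}

int
main ()
{
  unsigned best;
  if (best_width (12, 0, false, &best))
    std::printf ("widest fit: %u bits\n", best);	/* 64 */
  if (best_width (12, 32, true, &best))
    std::printf ("smallest fit: %u bits\n", best);	/* 16 */
  return 0;
}
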