for gcc/ChangeLog
from Alexandre Oliva <aoliva@redhat.com>
PR debug/43092
PR debug/43494
* rtl.h (for_each_inc_dec_fn): New type.
(for_each_inc_dec): Declare.
* rtlanal.c (struct for_each_inc_dec_ops): New type.
(for_each_inc_dec_find_inc_dec): New fn.
(for_each_inc_dec_find_mem): New fn.
(for_each_inc_dec): New fn.
* dse.c (struct insn_size): Remove.
(replace_inc_dec, replace_inc_dec_mem): Remove.
(emit_inc_dec_insn_before): New fn.
(check_for_inc_dec): Use it, along with for_each_inc_dec.
(canon_address): Pass mem modes to cselib_lookup.
* cselib.h (cselib_lookup): Add memmode argument. Adjust callers.
(cselib_lookup_from_insn): Likewise.
(cselib_subst_to_values): Likewise.
* cselib.c (find_slot_memmode): New var.
(cselib_find_slot): New fn. Use it instead of
htab_find_slot_with_hash everywhere.
(entry_and_rtx_equal_p): Use find_slot_memmode.
(autoinc_split): New fn.
(rtx_equal_for_cselib_p): Rename and implement in terms of...
(rtx_equal_for_cselib_1): ... this. Take memmode, pass it on.
Deal with autoinc. Special-case recursion into MEMs.
(cselib_hash_rtx): Likewise.
(cselib_lookup_mem): Infer pmode from address mode. Distinguish
address and MEM modes.
(cselib_subst_to_values): Add memmode, pass it on.
Deal with autoinc.
(cselib_lookup): Add memmode argument, pass it on.
(cselib_lookup_from_insn): Add memmode.
(struct cselib_record_autoinc_data): New.
(cselib_record_autoinc_cb): New fn.
(cselib_record_sets): Use it, along with for_each_inc_dec. Pass MEM
mode to cselib_lookup.
* var-tracking.c (replace_expr_with_values, use_type): Pass MEM mode
to cselib_lookup.
(add_uses): Likewise, also to cselib_subst_to_values.
(add_stores): Likewise.
* sched-deps.c (add_insn_mem_dependence): Pass mode to
cselib_subst_to_values.
(sched_analyze_1, sched_analyze_2): Likewise. Adjusted.
* gcse.c (do_local_cprop): Adjusted.
* postreload.c (reload_cse_simplify_set): Adjusted.
(reload_cse_simplify_operands): Adjusted.
	* sel-sched-dump.c (debug_mem_addr_value): Pass mode.
===================================================================
@@ -1865,6 +1865,17 @@ extern int computed_jump_p (const_rtx);
typedef int (*rtx_function) (rtx *, void *);
extern int for_each_rtx (rtx *, rtx_function, void *);
+/* Callback for for_each_inc_dec, to process the autoinc operation OP
+ within MEM. It sets DEST to SRC + SRCOFF, or SRC if SRCOFF is
+ NULL. The callback is passed the same opaque ARG passed to
+ for_each_inc_dec. Return zero to continue looking for other
+ autoinc operations, -1 to skip OP's operands, and any other value
+ to interrupt the traversal and return that value to the caller of
+ for_each_inc_dec. */
+typedef int (*for_each_inc_dec_fn) (rtx mem, rtx op, rtx dest, rtx src,
+ rtx srcoff, void *arg);
+extern int for_each_inc_dec (rtx *, for_each_inc_dec_fn, void *arg);
+
typedef int (*rtx_equal_p_callback_function) (const_rtx *, const_rtx *,
rtx *, rtx *);
extern int rtx_equal_p_cb (const_rtx, const_rtx,
===================================================================
@@ -2866,7 +2866,122 @@ for_each_rtx (rtx *x, rtx_function f, vo
return for_each_rtx_1 (*x, i, f, data);
}
+
+
+/* Data structure that holds the internal state communicated between
+ for_each_inc_dec, for_each_inc_dec_find_mem and
+ for_each_inc_dec_find_inc_dec. */
+
+struct for_each_inc_dec_ops {
+ /* The function to be called for each autoinc operation found. */
+ for_each_inc_dec_fn fn;
+ /* The opaque argument to be passed to it. */
+ void *arg;
+ /* The MEM we're visiting, if any. */
+ rtx mem;
+};
+
+static int for_each_inc_dec_find_mem (rtx *r, void *d);
+/* Find PRE/POST-INC/DEC/MODIFY operations within *R, compute the
+   equivalent add expression, and pass it along with the enclosing
+   MEM to the callback in D (a struct for_each_inc_dec_ops).  */
+
+static int
+for_each_inc_dec_find_inc_dec (rtx *r, void *d)
+{
+ rtx x = *r;
+ struct for_each_inc_dec_ops *data = (struct for_each_inc_dec_ops *)d;
+ int size = GET_MODE_SIZE (GET_MODE (data->mem));
+
+ switch (GET_CODE (x))
+ {
+ case PRE_INC:
+ case POST_INC:
+ {
+ rtx r1 = XEXP (x, 0);
+ rtx c = gen_int_mode (size, GET_MODE (r1));
+ return data->fn (data->mem, x, r1, r1, c, data->arg);
+ }
+
+ case PRE_DEC:
+ case POST_DEC:
+ {
+ rtx r1 = XEXP (x, 0);
+ rtx c = gen_int_mode (-size, GET_MODE (r1));
+ return data->fn (data->mem, x, r1, r1, c, data->arg);
+ }
+
+ case PRE_MODIFY:
+ case POST_MODIFY:
+ {
+ rtx r1 = XEXP (x, 0);
+ rtx add = XEXP (x, 1);
+ return data->fn (data->mem, x, r1, add, NULL, data->arg);
+ }
+
+ case MEM:
+ {
+ rtx save = data->mem;
+ int ret = for_each_inc_dec_find_mem (r, d);
+ data->mem = save;
+ return ret;
+ }
+
+ default:
+ return 0;
+ }
+}
+
+/* If *R is a MEM, traverse its address looking for
+   PRE/POST-INC/DEC/MODIFY operations, invoking the callback in D for
+   each one found.  */
+
+static int
+for_each_inc_dec_find_mem (rtx *r, void *d)
+{
+ rtx x = *r;
+ if (x != NULL_RTX && MEM_P (x))
+ {
+ struct for_each_inc_dec_ops *data = (struct for_each_inc_dec_ops *) d;
+ int result;
+
+ data->mem = x;
+
+ result = for_each_rtx (&XEXP (x, 0), for_each_inc_dec_find_inc_dec,
+ data);
+ if (result)
+ return result;
+
+ return -1;
+ }
+ return 0;
+}
+
+/* Traverse *X looking for MEMs, and for autoinc operations within
+ them. For each such autoinc operation found, call FN, passing it
+ the innermost enclosing MEM, the operation itself, the RTX modified
+ by the operation, two RTXs (the second may be NULL) that, once
+ added, represent the value to be held by the modified RTX
+ afterwards, and ARG. FN is to return -1 to skip looking for other
+ autoinc operations within the visited operation, 0 to continue the
+ traversal, or any other value to have it returned to the caller of
+ for_each_inc_dec. */
+
+int
+for_each_inc_dec (rtx *x,
+ for_each_inc_dec_fn fn,
+ void *arg)
+{
+ struct for_each_inc_dec_ops data;
+
+ data.fn = fn;
+ data.arg = arg;
+ data.mem = NULL;
+
+ return for_each_rtx (x, for_each_inc_dec_find_mem, &data);
+}
+
+
/* Searches X for any reference to REGNO, returning the rtx of the
reference found if any. Otherwise, returns NULL_RTX. */
===================================================================
@@ -807,81 +807,22 @@ free_store_info (insn_info_t insn_info)
}
-struct insn_size {
- int size;
- rtx insn;
-};
-
-
-/* Add an insn to do the add inside a x if it is a
- PRE/POST-INC/DEC/MODIFY. D is an structure containing the insn and
- the size of the mode of the MEM that this is inside of. */
-
static int
-replace_inc_dec (rtx *r, void *d)
+emit_inc_dec_insn_before (rtx mem ATTRIBUTE_UNUSED,
+ rtx op ATTRIBUTE_UNUSED,
+ rtx dest, rtx src, rtx srcoff, void *arg)
{
- rtx x = *r;
- struct insn_size *data = (struct insn_size *)d;
- switch (GET_CODE (x))
- {
- case PRE_INC:
- case POST_INC:
- {
- rtx r1 = XEXP (x, 0);
- rtx c = gen_int_mode (data->size, GET_MODE (r1));
- emit_insn_before (gen_rtx_SET (VOIDmode, r1,
- gen_rtx_PLUS (GET_MODE (r1), r1, c)),
- data->insn);
- return -1;
- }
-
- case PRE_DEC:
- case POST_DEC:
- {
- rtx r1 = XEXP (x, 0);
- rtx c = gen_int_mode (-data->size, GET_MODE (r1));
- emit_insn_before (gen_rtx_SET (VOIDmode, r1,
- gen_rtx_PLUS (GET_MODE (r1), r1, c)),
- data->insn);
- return -1;
- }
-
- case PRE_MODIFY:
- case POST_MODIFY:
- {
- /* We can reuse the add because we are about to delete the
- insn that contained it. */
- rtx add = XEXP (x, 0);
- rtx r1 = XEXP (add, 0);
- emit_insn_before (gen_rtx_SET (VOIDmode, r1, add), data->insn);
- return -1;
- }
+ rtx insn = (rtx)arg;
- default:
- return 0;
- }
-}
+ if (srcoff)
+ src = gen_rtx_PLUS (GET_MODE (src), src, srcoff);
+  /* We can reuse all operands without copying, because we are about
+     to delete the insn that contained them.  */
-/* If X is a MEM, check the address to see if it is PRE/POST-INC/DEC/MODIFY
- and generate an add to replace that. */
+ emit_insn_before (gen_rtx_SET (VOIDmode, dest, src), insn);
-static int
-replace_inc_dec_mem (rtx *r, void *d)
-{
- rtx x = *r;
- if (x != NULL_RTX && MEM_P (x))
- {
- struct insn_size data;
-
- data.size = GET_MODE_SIZE (GET_MODE (x));
- data.insn = (rtx) d;
-
- for_each_rtx (&XEXP (x, 0), replace_inc_dec, &data);
-
- return -1;
- }
- return 0;
+ return -1;
}
/* Before we delete INSN, make sure that the auto inc/dec, if it is
@@ -892,7 +833,7 @@ check_for_inc_dec (rtx insn)
{
rtx note = find_reg_note (insn, REG_INC, NULL_RTX);
if (note)
- for_each_rtx (&insn, replace_inc_dec_mem, insn);
+ for_each_inc_dec (&insn, emit_inc_dec_insn_before, insn);
}
@@ -1107,7 +1048,7 @@ canon_address (rtx mem,
*alias_set_out = 0;
- cselib_lookup (mem_address, address_mode, 1);
+ cselib_lookup (mem_address, address_mode, 1, GET_MODE (mem));
if (dump_file)
{
@@ -1187,7 +1128,7 @@ canon_address (rtx mem,
}
}
- *base = cselib_lookup (address, address_mode, true);
+ *base = cselib_lookup (address, address_mode, true, GET_MODE (mem));
*group_id = -1;
if (*base == NULL)
===================================================================
@@ -76,8 +76,10 @@ extern void (*cselib_discard_hook) (csel
extern void (*cselib_record_sets_hook) (rtx insn, struct cselib_set *sets,
int n_sets);
-extern cselib_val *cselib_lookup (rtx, enum machine_mode, int);
-extern cselib_val *cselib_lookup_from_insn (rtx, enum machine_mode, int, rtx);
+extern cselib_val *cselib_lookup (rtx, enum machine_mode,
+ int, enum machine_mode);
+extern cselib_val *cselib_lookup_from_insn (rtx, enum machine_mode,
+ int, enum machine_mode, rtx);
extern void cselib_init (int);
extern void cselib_clear_table (void);
extern void cselib_finish (void);
@@ -91,7 +93,7 @@ extern rtx cselib_expand_value_rtx_cb (r
cselib_expand_callback, void *);
extern bool cselib_dummy_expand_value_rtx_cb (rtx, bitmap, int,
cselib_expand_callback, void *);
-extern rtx cselib_subst_to_values (rtx);
+extern rtx cselib_subst_to_values (rtx, enum machine_mode);
extern void cselib_invalidate_rtx (rtx);
extern void cselib_reset_table (unsigned int);
===================================================================
@@ -57,7 +57,8 @@ static void unchain_one_elt_loc_list (st
static int discard_useless_locs (void **, void *);
static int discard_useless_values (void **, void *);
static void remove_useless_values (void);
-static unsigned int cselib_hash_rtx (rtx, int);
+static int rtx_equal_for_cselib_1 (rtx, rtx, enum machine_mode);
+static unsigned int cselib_hash_rtx (rtx, int, enum machine_mode);
static cselib_val *new_cselib_val (unsigned int, enum machine_mode, rtx);
static void add_mem_for_addr (cselib_val *, cselib_val *, rtx);
static cselib_val *cselib_lookup_mem (rtx, int);
@@ -385,6 +386,26 @@ cselib_get_next_uid (void)
return next_uid;
}
+/* See the documentation of cselib_find_slot below. */
+static enum machine_mode find_slot_memmode;
+
+/* Search for X, whose hashcode is HASH, in CSELIB_HASH_TABLE,
+ INSERTing if requested. When X is part of the address of a MEM,
+ MEMMODE should specify the mode of the MEM. While searching the
+ table, MEMMODE is held in FIND_SLOT_MEMMODE, so that autoinc RTXs
+ in X can be resolved. */
+
+static void **
+cselib_find_slot (rtx x, hashval_t hash, enum insert_option insert,
+ enum machine_mode memmode)
+{
+ void **slot;
+ find_slot_memmode = memmode;
+ slot = htab_find_slot_with_hash (cselib_hash_table, x, hash, insert);
+ find_slot_memmode = VOIDmode;
+ return slot;
+}
+
/* The equality test for our hash table. The first argument ENTRY is a table
element (i.e. a cselib_val), while the second arg X is an rtx. We know
that all callers of htab_find_slot_with_hash will wrap CONST_INTs into a
@@ -414,7 +435,7 @@ entry_and_rtx_equal_p (const void *entry
/* We don't guarantee that distinct rtx's have different hash values,
so we need to do a comparison. */
for (l = v->locs; l; l = l->next)
- if (rtx_equal_for_cselib_p (l->loc, x))
+ if (rtx_equal_for_cselib_1 (l->loc, x, find_slot_memmode))
{
promote_debug_loc (l);
return 1;
@@ -626,13 +647,59 @@ cselib_reg_set_mode (const_rtx x)
int
rtx_equal_for_cselib_p (rtx x, rtx y)
{
+ return rtx_equal_for_cselib_1 (x, y, VOIDmode);
+}
+
+/* If x is a PLUS or an autoinc operation, expand the operation,
+ storing the offset, if any, in *OFF. */
+
+static rtx
+autoinc_split (rtx x, rtx *off, enum machine_mode memmode)
+{
+ switch (GET_CODE (x))
+ {
+ case PLUS:
+ *off = XEXP (x, 1);
+ return XEXP (x, 0);
+
+ case PRE_DEC:
+ if (memmode == VOIDmode)
+ return x;
+
+ *off = GEN_INT (-GET_MODE_SIZE (memmode));
+ return XEXP (x, 0);
+ break;
+
+ case PRE_INC:
+ if (memmode == VOIDmode)
+ return x;
+
+ *off = GEN_INT (GET_MODE_SIZE (memmode));
+ return XEXP (x, 0);
+
+ case PRE_MODIFY:
+ return XEXP (x, 1);
+
+ case POST_DEC:
+ case POST_INC:
+ case POST_MODIFY:
+ return XEXP (x, 0);
+
+ default:
+ return x;
+ }
+}
+
+static int
+rtx_equal_for_cselib_1 (rtx x, rtx y, enum machine_mode memmode)
+{
enum rtx_code code;
const char *fmt;
int i;
if (REG_P (x) || MEM_P (x))
{
- cselib_val *e = cselib_lookup (x, GET_MODE (x), 0);
+ cselib_val *e = cselib_lookup (x, GET_MODE (x), 0, memmode);
if (e)
x = e->val_rtx;
@@ -640,7 +707,7 @@ rtx_equal_for_cselib_p (rtx x, rtx y)
if (REG_P (y) || MEM_P (y))
{
- cselib_val *e = cselib_lookup (y, GET_MODE (y), 0);
+ cselib_val *e = cselib_lookup (y, GET_MODE (y), 0, memmode);
if (e)
y = e->val_rtx;
@@ -664,7 +731,7 @@ rtx_equal_for_cselib_p (rtx x, rtx y)
/* Avoid infinite recursion. */
if (REG_P (t) || MEM_P (t))
continue;
- else if (rtx_equal_for_cselib_p (t, y))
+ else if (rtx_equal_for_cselib_1 (t, y, memmode))
return 1;
}
@@ -682,16 +749,37 @@ rtx_equal_for_cselib_p (rtx x, rtx y)
if (REG_P (t) || MEM_P (t))
continue;
- else if (rtx_equal_for_cselib_p (x, t))
+ else if (rtx_equal_for_cselib_1 (x, t, memmode))
return 1;
}
return 0;
}
- if (GET_CODE (x) != GET_CODE (y) || GET_MODE (x) != GET_MODE (y))
+ if (GET_MODE (x) != GET_MODE (y))
return 0;
+ if (GET_CODE (x) != GET_CODE (y))
+ {
+ rtx xorig = x, yorig = y;
+ rtx xoff = NULL, yoff = NULL;
+
+ x = autoinc_split (x, &xoff, memmode);
+ y = autoinc_split (y, &yoff, memmode);
+
+ if (!xoff != !yoff)
+ return 0;
+
+ if (xoff && !rtx_equal_for_cselib_1 (xoff, yoff, memmode))
+ return 0;
+
+ /* Don't recurse if nothing changed. */
+ if (x != xorig || y != yorig)
+ return rtx_equal_for_cselib_1 (x, y, memmode);
+
+ return 0;
+ }
+
/* These won't be handled correctly by the code below. */
switch (GET_CODE (x))
{
@@ -707,6 +795,11 @@ rtx_equal_for_cselib_p (rtx x, rtx y)
case LABEL_REF:
return XEXP (x, 0) == XEXP (y, 0);
+ case MEM:
+ /* We have to compare any autoinc operations in the addresses
+ using this MEM's mode. */
+ return rtx_equal_for_cselib_1 (XEXP (x, 0), XEXP (y, 0), GET_MODE (x));
+
default:
break;
}
@@ -739,18 +832,18 @@ rtx_equal_for_cselib_p (rtx x, rtx y)
/* And the corresponding elements must match. */
for (j = 0; j < XVECLEN (x, i); j++)
- if (! rtx_equal_for_cselib_p (XVECEXP (x, i, j),
- XVECEXP (y, i, j)))
+ if (! rtx_equal_for_cselib_1 (XVECEXP (x, i, j),
+ XVECEXP (y, i, j), memmode))
return 0;
break;
case 'e':
if (i == 1
&& targetm.commutative_p (x, UNKNOWN)
- && rtx_equal_for_cselib_p (XEXP (x, 1), XEXP (y, 0))
- && rtx_equal_for_cselib_p (XEXP (x, 0), XEXP (y, 1)))
+ && rtx_equal_for_cselib_1 (XEXP (x, 1), XEXP (y, 0), memmode)
+ && rtx_equal_for_cselib_1 (XEXP (x, 0), XEXP (y, 1), memmode))
return 1;
- if (! rtx_equal_for_cselib_p (XEXP (x, i), XEXP (y, i)))
+ if (! rtx_equal_for_cselib_1 (XEXP (x, i), XEXP (y, i), memmode))
return 0;
break;
@@ -802,6 +895,8 @@ wrap_constant (enum machine_mode mode, r
that take commutativity into account.
If we wanted to also support associative rules, we'd have to use a different
strategy to avoid returning spurious 0, e.g. return ~(~0U >> 1) .
+ MEMMODE indicates the mode of an enclosing MEM, and it's only
+ used to compute auto-inc values.
We used to have a MODE argument for hashing for CONST_INTs, but that
didn't make sense, since it caused spurious hash differences between
(set (reg:SI 1) (const_int))
@@ -812,7 +907,7 @@ wrap_constant (enum machine_mode mode, r
in a comparison anyway, since relying on hash differences is unsafe. */
static unsigned int
-cselib_hash_rtx (rtx x, int create)
+cselib_hash_rtx (rtx x, int create, enum machine_mode memmode)
{
cselib_val *e;
int i, j;
@@ -827,7 +922,7 @@ cselib_hash_rtx (rtx x, int create)
{
case MEM:
case REG:
- e = cselib_lookup (x, GET_MODE (x), create);
+ e = cselib_lookup (x, GET_MODE (x), create, memmode);
if (! e)
return 0;
@@ -873,7 +968,7 @@ cselib_hash_rtx (rtx x, int create)
for (i = 0; i < units; ++i)
{
elt = CONST_VECTOR_ELT (x, i);
- hash += cselib_hash_rtx (elt, 0);
+ hash += cselib_hash_rtx (elt, 0, memmode);
}
return hash;
@@ -906,10 +1001,26 @@ cselib_hash_rtx (rtx x, int create)
case PRE_DEC:
case PRE_INC:
+ /* We can't compute these without knowing the MEM mode. */
+ gcc_assert (memmode != VOIDmode);
+ i = GET_MODE_SIZE (memmode);
+ if (code == PRE_DEC)
+ i = -i;
+ hash += (unsigned) PLUS - (unsigned)code
+ + cselib_hash_rtx (XEXP (x, 0), create, memmode)
+ + cselib_hash_rtx (GEN_INT (i), create, memmode);
+ return hash ? hash : 1 + (unsigned) PLUS;
+
+ case PRE_MODIFY:
+ gcc_assert (memmode != VOIDmode);
+ return cselib_hash_rtx (XEXP (x, 1), create, memmode);
+
case POST_DEC:
case POST_INC:
case POST_MODIFY:
- case PRE_MODIFY:
+ gcc_assert (memmode != VOIDmode);
+ return cselib_hash_rtx (XEXP (x, 0), create, memmode);
+
case PC:
case CC0:
case CALL:
@@ -935,7 +1046,7 @@ cselib_hash_rtx (rtx x, int create)
case 'e':
{
rtx tem = XEXP (x, i);
- unsigned int tem_hash = cselib_hash_rtx (tem, create);
+ unsigned int tem_hash = cselib_hash_rtx (tem, create, memmode);
if (tem_hash == 0)
return 0;
@@ -947,7 +1058,7 @@ cselib_hash_rtx (rtx x, int create)
for (j = 0; j < XVECLEN (x, i); j++)
{
unsigned int tem_hash
- = cselib_hash_rtx (XVECEXP (x, i, j), create);
+ = cselib_hash_rtx (XVECEXP (x, i, j), create, memmode);
if (tem_hash == 0)
return 0;
@@ -1059,7 +1170,7 @@ add_mem_for_addr (cselib_val *addr_elt,
static cselib_val *
cselib_lookup_mem (rtx x, int create)
{
- enum machine_mode mode = GET_MODE (x);
+ enum machine_mode mode = GET_MODE (x), pmode = Pmode;
void **slot;
cselib_val *addr;
cselib_val *mem_elt;
@@ -1070,8 +1181,11 @@ cselib_lookup_mem (rtx x, int create)
|| (FLOAT_MODE_P (mode) && flag_float_store))
return 0;
+ if (GET_MODE (XEXP (x, 0)) != VOIDmode)
+ pmode = GET_MODE (XEXP (x, 0));
+
/* Look up the value for the address. */
- addr = cselib_lookup (XEXP (x, 0), mode, create);
+ addr = cselib_lookup (XEXP (x, 0), pmode, create, mode);
if (! addr)
return 0;
@@ -1088,8 +1202,8 @@ cselib_lookup_mem (rtx x, int create)
mem_elt = new_cselib_val (next_uid, mode, x);
add_mem_for_addr (addr, mem_elt, x);
- slot = htab_find_slot_with_hash (cselib_hash_table, wrap_constant (mode, x),
- mem_elt->hash, INSERT);
+ slot = cselib_find_slot (wrap_constant (mode, x), mem_elt->hash,
+ INSERT, mode);
*slot = mem_elt;
return mem_elt;
}
@@ -1521,10 +1635,11 @@ cselib_expand_value_rtx_1 (rtx orig, str
with VALUE expressions. This way, it becomes independent of changes
to registers and memory.
X isn't actually modified; if modifications are needed, new rtl is
- allocated. However, the return value can share rtl with X. */
+ allocated. However, the return value can share rtl with X.
+ If X is within a MEM, MEMMODE must be the mode of the MEM. */
rtx
-cselib_subst_to_values (rtx x)
+cselib_subst_to_values (rtx x, enum machine_mode memmode)
{
enum rtx_code code = GET_CODE (x);
const char *fmt = GET_RTX_FORMAT (code);
@@ -1561,14 +1676,24 @@ cselib_subst_to_values (rtx x)
case CONST_FIXED:
return x;
- case POST_INC:
+ case PRE_DEC:
case PRE_INC:
+ gcc_assert (memmode != VOIDmode);
+ i = GET_MODE_SIZE (memmode);
+ if (code == PRE_DEC)
+ i = -i;
+ return cselib_subst_to_values (plus_constant (XEXP (x, 0), i),
+ memmode);
+
+ case PRE_MODIFY:
+ gcc_assert (memmode != VOIDmode);
+ return cselib_subst_to_values (XEXP (x, 1), memmode);
+
case POST_DEC:
- case PRE_DEC:
+ case POST_INC:
case POST_MODIFY:
- case PRE_MODIFY:
- e = new_cselib_val (next_uid, GET_MODE (x), x);
- return e->val_rtx;
+ gcc_assert (memmode != VOIDmode);
+ return cselib_subst_to_values (XEXP (x, 0), memmode);
default:
break;
@@ -1578,7 +1703,7 @@ cselib_subst_to_values (rtx x)
{
if (fmt[i] == 'e')
{
- rtx t = cselib_subst_to_values (XEXP (x, i));
+ rtx t = cselib_subst_to_values (XEXP (x, i), memmode);
if (t != XEXP (x, i))
{
@@ -1593,7 +1718,7 @@ cselib_subst_to_values (rtx x)
for (j = 0; j < XVECLEN (x, i); j++)
{
- rtx t = cselib_subst_to_values (XVECEXP (x, i, j));
+ rtx t = cselib_subst_to_values (XVECEXP (x, i, j), memmode);
if (t != XVECEXP (x, i, j))
{
@@ -1612,13 +1737,16 @@ cselib_subst_to_values (rtx x)
return copy;
}
-/* Look up the rtl expression X in our tables and return the value it has.
- If CREATE is zero, we return NULL if we don't know the value. Otherwise,
- we create a new one if possible, using mode MODE if X doesn't have a mode
- (i.e. because it's a constant). */
+/* Look up the rtl expression X in our tables and return the value it
+ has. If CREATE is zero, we return NULL if we don't know the value.
+ Otherwise, we create a new one if possible, using mode MODE if X
+ doesn't have a mode (i.e. because it's a constant). When X is part
+ of an address, MEMMODE should be the mode of the enclosing MEM if
+ we're tracking autoinc expressions. */
static cselib_val *
-cselib_lookup_1 (rtx x, enum machine_mode mode, int create)
+cselib_lookup_1 (rtx x, enum machine_mode mode,
+ int create, enum machine_mode memmode)
{
void **slot;
cselib_val *e;
@@ -1667,7 +1795,7 @@ cselib_lookup_1 (rtx x, enum machine_mod
REG_VALUES (i) = new_elt_list (REG_VALUES (i), NULL);
}
REG_VALUES (i)->next = new_elt_list (REG_VALUES (i)->next, e);
- slot = htab_find_slot_with_hash (cselib_hash_table, x, e->hash, INSERT);
+ slot = cselib_find_slot (x, e->hash, INSERT, memmode);
*slot = e;
return e;
}
@@ -1675,13 +1803,13 @@ cselib_lookup_1 (rtx x, enum machine_mod
if (MEM_P (x))
return cselib_lookup_mem (x, create);
- hashval = cselib_hash_rtx (x, create);
+ hashval = cselib_hash_rtx (x, create, memmode);
/* Can't even create if hashing is not possible. */
if (! hashval)
return 0;
- slot = htab_find_slot_with_hash (cselib_hash_table, wrap_constant (mode, x),
- hashval, create ? INSERT : NO_INSERT);
+ slot = cselib_find_slot (wrap_constant (mode, x), hashval,
+ create ? INSERT : NO_INSERT, memmode);
if (slot == 0)
return 0;
@@ -1695,7 +1823,8 @@ cselib_lookup_1 (rtx x, enum machine_mod
the hash table is inconsistent until we do so, and
cselib_subst_to_values will need to do lookups. */
*slot = (void *) e;
- e->locs = new_elt_loc_list (e->locs, cselib_subst_to_values (x));
+ e->locs = new_elt_loc_list (e->locs,
+ cselib_subst_to_values (x, memmode));
return e;
}
@@ -1703,14 +1832,14 @@ cselib_lookup_1 (rtx x, enum machine_mod
cselib_val *
cselib_lookup_from_insn (rtx x, enum machine_mode mode,
- int create, rtx insn)
+ int create, enum machine_mode memmode, rtx insn)
{
cselib_val *ret;
gcc_assert (!cselib_current_insn);
cselib_current_insn = insn;
- ret = cselib_lookup (x, mode, create);
+ ret = cselib_lookup (x, mode, create, memmode);
cselib_current_insn = NULL;
@@ -1721,9 +1850,10 @@ cselib_lookup_from_insn (rtx x, enum mac
maintains invariants related with debug insns. */
cselib_val *
-cselib_lookup (rtx x, enum machine_mode mode, int create)
+cselib_lookup (rtx x, enum machine_mode mode,
+ int create, enum machine_mode memmode)
{
- cselib_val *ret = cselib_lookup_1 (x, mode, create);
+ cselib_val *ret = cselib_lookup_1 (x, mode, create, memmode);
/* ??? Should we return NULL if we're not to create an entry, the
found loc is a debug loc and cselib_current_insn is not DEBUG?
@@ -1908,7 +2038,7 @@ cselib_invalidate_mem (rtx mem_rtx)
/* This one overlaps. */
/* We must have a mapping from this MEM's address to the
value (E). Remove that, too. */
- addr = cselib_lookup (XEXP (x, 0), VOIDmode, 0);
+ addr = cselib_lookup (XEXP (x, 0), VOIDmode, 0, GET_MODE (x));
mem_chain = &addr->addr_list;
for (;;)
{
@@ -1958,13 +2088,6 @@ cselib_invalidate_rtx (rtx dest)
cselib_invalidate_regno (REGNO (dest), GET_MODE (dest));
else if (MEM_P (dest))
cselib_invalidate_mem (dest);
-
- /* Some machines don't define AUTO_INC_DEC, but they still use push
- instructions. We need to catch that case here in order to
- invalidate the stack pointer correctly. Note that invalidating
- the stack pointer is different from invalidating DEST. */
- if (push_operand (dest, GET_MODE (dest)))
- cselib_invalidate_rtx (stack_pointer_rtx);
}
/* A wrapper for cselib_invalidate_rtx to be called via note_stores. */
@@ -2027,6 +2150,36 @@ cselib_record_set (rtx dest, cselib_val
in a PARALLEL. Since it's fairly cheap, use a really large number. */
#define MAX_SETS (FIRST_PSEUDO_REGISTER * 2)
+struct cselib_record_autoinc_data
+{
+ struct cselib_set *sets;
+ int n_sets;
+};
+
+/* Callback for for_each_inc_dec. Records in ARG the SETs implied by
+ autoinc RTXs: SRCBASE plus SRCOFF if non-NULL is stored in
+ DEST. */
+
+static int
+cselib_record_autoinc_cb (rtx mem ATTRIBUTE_UNUSED, rtx op ATTRIBUTE_UNUSED,
+ rtx dest, rtx srcbase, rtx srcoff, void *arg)
+{
+ struct cselib_record_autoinc_data *data;
+ data = (struct cselib_record_autoinc_data *)arg;
+
+ data->sets[data->n_sets].dest = dest;
+
+ if (srcoff)
+ data->sets[data->n_sets].src = gen_rtx_PLUS (GET_MODE (srcbase),
+ srcbase, srcoff);
+ else
+ data->sets[data->n_sets].src = srcbase;
+
+ data->n_sets++;
+
+ return -1;
+}
+
/* Record the effects of any sets in INSN. */
static void
cselib_record_sets (rtx insn)
@@ -2036,6 +2189,8 @@ cselib_record_sets (rtx insn)
struct cselib_set sets[MAX_SETS];
rtx body = PATTERN (insn);
rtx cond = 0;
+ int n_sets_before_autoinc;
+ struct cselib_record_autoinc_data data;
body = PATTERN (insn);
if (GET_CODE (body) == COND_EXEC)
@@ -2079,6 +2234,11 @@ cselib_record_sets (rtx insn)
sets[0].src = XEXP (note, 0);
}
+ data.sets = sets;
+ data.n_sets = n_sets_before_autoinc = n_sets;
+ for_each_inc_dec (&insn, cselib_record_autoinc_cb, &data);
+ n_sets = data.n_sets;
+
/* Look up the values that are read. Do this before invalidating the
locations that are written. */
for (i = 0; i < n_sets; i++)
@@ -2097,14 +2257,15 @@ cselib_record_sets (rtx insn)
rtx src = sets[i].src;
if (cond)
src = gen_rtx_IF_THEN_ELSE (GET_MODE (dest), cond, src, dest);
- sets[i].src_elt = cselib_lookup (src, GET_MODE (dest), 1);
+ sets[i].src_elt = cselib_lookup (src, GET_MODE (dest), 1, VOIDmode);
if (MEM_P (dest))
{
enum machine_mode address_mode
= targetm.addr_space.address_mode (MEM_ADDR_SPACE (dest));
sets[i].dest_addr_elt = cselib_lookup (XEXP (dest, 0),
- address_mode, 1);
+ address_mode, 1,
+ GET_MODE (dest));
}
else
sets[i].dest_addr_elt = 0;
@@ -2119,6 +2280,9 @@ cselib_record_sets (rtx insn)
locations may go away. */
note_stores (body, cselib_invalidate_rtx_note_stores, NULL);
+ for (i = n_sets_before_autoinc; i < n_sets; i++)
+ cselib_invalidate_rtx (sets[i].dest);
+
/* If this is an asm, look for duplicate sets. This can happen when the
user uses the same value as an output multiple times. This is valid
if the outputs are not actually used thereafter. Treat this case as
===================================================================
@@ -743,7 +743,7 @@ use_narrower_mode_test (rtx *loc, void *
switch (GET_CODE (*loc))
{
case REG:
- if (cselib_lookup (*loc, GET_MODE (SUBREG_REG (subreg)), 0))
+ if (cselib_lookup (*loc, GET_MODE (SUBREG_REG (subreg)), 0, VOIDmode))
return 1;
return -1;
case PLUS:
@@ -3984,8 +3984,10 @@ variable_post_merge_new_vals (void **slo
subsequent rounds. */
cselib_val *v;
gcc_assert (!cselib_lookup (node->loc,
- GET_MODE (node->loc), 0));
- v = cselib_lookup (node->loc, GET_MODE (node->loc), 1);
+ GET_MODE (node->loc), 0,
+ VOIDmode));
+ v = cselib_lookup (node->loc, GET_MODE (node->loc), 1,
+ VOIDmode);
cselib_preserve_value (v);
cselib_invalidate_rtx (node->loc);
cval = v->val_rtx;
@@ -4827,7 +4829,7 @@ find_use_val (rtx x, enum machine_mode m
return cui->sets[i].src_elt;
}
else
- return cselib_lookup (x, mode, 0);
+ return cselib_lookup (x, mode, 0, VOIDmode);
}
return NULL;
@@ -4856,14 +4858,15 @@ replace_expr_with_values (rtx loc)
else if (MEM_P (loc))
{
cselib_val *addr = cselib_lookup (XEXP (loc, 0),
- get_address_mode (loc), 0);
+ get_address_mode (loc), 0,
+ GET_MODE (loc));
if (addr)
return replace_equiv_address_nv (loc, addr->val_rtx);
else
return NULL;
}
else
- return cselib_subst_to_values (loc);
+ return cselib_subst_to_values (loc, VOIDmode);
}
/* Determine what kind of micro operation to choose for a USE. Return
@@ -4883,7 +4886,8 @@ use_type (rtx loc, struct count_use_info
rtx ploc = PAT_VAR_LOCATION_LOC (loc);
if (! VAR_LOC_UNKNOWN_P (ploc))
{
- cselib_val *val = cselib_lookup (ploc, GET_MODE (loc), 1);
+ cselib_val *val = cselib_lookup (ploc, GET_MODE (loc), 1,
+ VOIDmode);
/* ??? flag_float_store and volatile mems are never
given values, but we could in theory use them for
@@ -4905,7 +4909,8 @@ use_type (rtx loc, struct count_use_info
if (REG_P (loc)
|| (find_use_val (loc, GET_MODE (loc), cui)
&& cselib_lookup (XEXP (loc, 0),
- get_address_mode (loc), 0)))
+ get_address_mode (loc), 0,
+ GET_MODE (loc))))
return MO_VAL_SET;
}
else
@@ -5067,13 +5072,15 @@ add_uses (rtx *ploc, void *data)
rtx mloc = vloc;
enum machine_mode address_mode = get_address_mode (mloc);
cselib_val *val
- = cselib_lookup (XEXP (mloc, 0), address_mode, 0);
+ = cselib_lookup (XEXP (mloc, 0), address_mode, 0,
+ GET_MODE (mloc));
if (val && !cselib_preserved_value_p (val))
{
micro_operation moa;
preserve_value (val);
- mloc = cselib_subst_to_values (XEXP (mloc, 0));
+ mloc = cselib_subst_to_values (XEXP (mloc, 0),
+ GET_MODE (mloc));
moa.type = MO_VAL_USE;
moa.insn = cui->insn;
moa.u.loc = gen_rtx_CONCAT (address_mode,
@@ -5143,13 +5150,15 @@ add_uses (rtx *ploc, void *data)
rtx mloc = oloc;
enum machine_mode address_mode = get_address_mode (mloc);
cselib_val *val
- = cselib_lookup (XEXP (mloc, 0), address_mode, 0);
+ = cselib_lookup (XEXP (mloc, 0), address_mode, 0,
+ GET_MODE (mloc));
if (val && !cselib_preserved_value_p (val))
{
micro_operation moa;
preserve_value (val);
- mloc = cselib_subst_to_values (XEXP (mloc, 0));
+ mloc = cselib_subst_to_values (XEXP (mloc, 0),
+ GET_MODE (mloc));
moa.type = MO_VAL_USE;
moa.insn = cui->insn;
moa.u.loc = gen_rtx_CONCAT (address_mode,
@@ -5259,7 +5268,7 @@ reverse_op (rtx val, const_rtx expr)
if (!SCALAR_INT_MODE_P (GET_MODE (src)) || XEXP (src, 0) == cfa_base_rtx)
return NULL_RTX;
- v = cselib_lookup (XEXP (src, 0), GET_MODE (XEXP (src, 0)), 0);
+ v = cselib_lookup (XEXP (src, 0), GET_MODE (XEXP (src, 0)), 0, VOIDmode);
if (!v || !cselib_preserved_value_p (v))
return NULL_RTX;
@@ -5380,13 +5389,15 @@ add_stores (rtx loc, const_rtx expr, voi
rtx mloc = loc;
enum machine_mode address_mode = get_address_mode (mloc);
cselib_val *val = cselib_lookup (XEXP (mloc, 0),
- address_mode, 0);
+ address_mode, 0,
+ GET_MODE (mloc));
if (val && !cselib_preserved_value_p (val))
{
preserve_value (val);
mo.type = MO_VAL_USE;
- mloc = cselib_subst_to_values (XEXP (mloc, 0));
+ mloc = cselib_subst_to_values (XEXP (mloc, 0),
+ GET_MODE (mloc));
mo.u.loc = gen_rtx_CONCAT (address_mode, val->val_rtx, mloc);
mo.insn = cui->insn;
if (dump_file && (dump_flags & TDF_DETAILS))
@@ -5445,7 +5456,7 @@ add_stores (rtx loc, const_rtx expr, voi
if (GET_CODE (PATTERN (cui->insn)) == COND_EXEC)
{
- cselib_val *oval = cselib_lookup (oloc, GET_MODE (oloc), 0);
+ cselib_val *oval = cselib_lookup (oloc, GET_MODE (oloc), 0, VOIDmode);
gcc_assert (oval != v);
gcc_assert (REG_P (oloc) || MEM_P (oloc));
@@ -8139,7 +8150,8 @@ vt_add_function_parameters (void)
if (offset)
continue;
- val = cselib_lookup (var_lowpart (mode, incoming), mode, true);
+ val = cselib_lookup (var_lowpart (mode, incoming), mode, true,
+ VOIDmode);
/* ??? Float-typed values in memory are not handled by
cselib. */
@@ -8227,7 +8239,7 @@ vt_init_cfa_base (void)
return;
val = cselib_lookup_from_insn (cfa_base_rtx, GET_MODE (cfa_base_rtx), 1,
- get_insns ());
+ VOIDmode, get_insns ());
preserve_value (val);
cselib_preserve_cfa_base_value (val, REGNO (cfa_base_rtx));
var_reg_decl_set (&VTI (ENTRY_BLOCK_PTR)->out, cfa_base_rtx,
===================================================================
@@ -1562,7 +1562,7 @@ add_insn_mem_dependence (struct deps_des
if (sched_deps_info->use_cselib)
{
mem = shallow_copy_rtx (mem);
- XEXP (mem, 0) = cselib_subst_to_values (XEXP (mem, 0));
+ XEXP (mem, 0) = cselib_subst_to_values (XEXP (mem, 0), GET_MODE (mem));
}
link = alloc_EXPR_LIST (VOIDmode, canon_rtx (mem), *mem_list);
*mem_list = link;
@@ -2279,8 +2279,9 @@ sched_analyze_1 (struct deps_desc *deps,
= targetm.addr_space.address_mode (MEM_ADDR_SPACE (dest));
t = shallow_copy_rtx (dest);
- cselib_lookup_from_insn (XEXP (t, 0), address_mode, 1, insn);
- XEXP (t, 0) = cselib_subst_to_values (XEXP (t, 0));
+ cselib_lookup_from_insn (XEXP (t, 0), address_mode, 1,
+ GET_MODE (t), insn);
+ XEXP (t, 0) = cselib_subst_to_values (XEXP (t, 0), GET_MODE (t));
}
t = canon_rtx (t);
@@ -2436,8 +2437,9 @@ sched_analyze_2 (struct deps_desc *deps,
= targetm.addr_space.address_mode (MEM_ADDR_SPACE (t));
t = shallow_copy_rtx (t);
- cselib_lookup_from_insn (XEXP (t, 0), address_mode, 1, insn);
- XEXP (t, 0) = cselib_subst_to_values (XEXP (t, 0));
+ cselib_lookup_from_insn (XEXP (t, 0), address_mode, 1,
+ GET_MODE (t), insn);
+ XEXP (t, 0) = cselib_subst_to_values (XEXP (t, 0), GET_MODE (t));
}
if (!DEBUG_INSN_P (insn))
===================================================================
@@ -2740,7 +2740,7 @@ do_local_cprop (rtx x, rtx insn)
|| (GET_CODE (PATTERN (insn)) != USE
&& asm_noperands (PATTERN (insn)) < 0)))
{
- cselib_val *val = cselib_lookup (x, GET_MODE (x), 0);
+ cselib_val *val = cselib_lookup (x, GET_MODE (x), 0, VOIDmode);
struct elt_loc_list *l;
if (!val)
===================================================================
@@ -263,7 +263,7 @@ reload_cse_simplify_set (rtx set, rtx in
return 0;
#endif
- val = cselib_lookup (src, GET_MODE (SET_DEST (set)), 0);
+ val = cselib_lookup (src, GET_MODE (SET_DEST (set)), 0, VOIDmode);
if (! val)
return 0;
@@ -477,7 +477,7 @@ reload_cse_simplify_operands (rtx insn,
continue;
}
#endif /* LOAD_EXTEND_OP */
- v = cselib_lookup (op, recog_data.operand_mode[i], 0);
+ v = cselib_lookup (op, recog_data.operand_mode[i], 0, VOIDmode);
if (! v)
continue;
===================================================================
@@ -961,8 +961,8 @@ debug_mem_addr_value (rtx x)
address_mode = targetm.addr_space.address_mode (MEM_ADDR_SPACE (x));
t = shallow_copy_rtx (x);
- if (cselib_lookup (XEXP (t, 0), address_mode, 0))
- XEXP (t, 0) = cselib_subst_to_values (XEXP (t, 0));
+ if (cselib_lookup (XEXP (t, 0), address_mode, 0, GET_MODE (t)))
+ XEXP (t, 0) = cselib_subst_to_values (XEXP (t, 0), GET_MODE (t));
t = canon_rtx (t);
addr = get_addr (XEXP (t, 0));