@@ -28117,9 +28117,9 @@ emit_unlikely_jump (rtx insn)
void
arm_expand_compare_and_swap (rtx operands[])
{
- rtx bval, rval, mem, oldval, newval, is_weak, mod_s, mod_f, x;
+ rtx bval, bdst, rval, mem, oldval, newval, is_weak, mod_s, mod_f, x;
machine_mode mode;
- rtx (*gen) (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
+ rtx (*gen) (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx);
bval = operands[0];
rval = operands[1];
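
(Aside, not part of the patch: the expander above implements the
atomic_compare_and_swap<mode> standard pattern, so a caller as small as the
sketch below reaches it. The function name and flags are illustrative;
Thumb-1 targets need an ISA with exclusive accesses, e.g. -march=armv8-m.base.)

    #include <stdatomic.h>

    /* Illustrative caller: expands through atomic_compare_and_swapsi into
       arm_expand_compare_and_swap; the boolean result feeds a branch,
       which the Z-set convention below makes cheap.  */
    int
    try_update (atomic_int *p, int expected, int desired)
    {
      return atomic_compare_exchange_strong (p, &expected, desired);
    }
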
@@ -28176,43 +28176,54 @@ arm_expand_compare_and_swap (rtx operands[])
gcc_unreachable ();
}
- emit_insn (gen (rval, mem, oldval, newval, is_weak, mod_s, mod_f));
+ bdst = TARGET_THUMB1 ? bval : gen_rtx_REG (CCmode, CC_REGNUM);
+ emit_insn (gen (bdst, rval, mem, oldval, newval, is_weak, mod_s, mod_f));
if (mode == QImode || mode == HImode)
emit_move_insn (operands[1], gen_lowpart (mode, rval));
/* In all cases, we arrange for success to be signaled by Z set.
This arrangement allows for the boolean result to be used directly
- in a subsequent branch, post optimization. */
- x = gen_rtx_REG (CCmode, CC_REGNUM);
- x = gen_rtx_EQ (SImode, x, const0_rtx);
- emit_insn (gen_rtx_SET (bval, x));
+ in a subsequent branch, post optimization. For Thumb-1 targets, the
+ boolean negation of the result is also stored in bval because the
+ Thumb-1 back end lacks dependency tracking for the CC flag: flag
+ setting is not represented at the RTL level. */
+ if (TARGET_THUMB1)
+ emit_insn (gen_cstoresi_eq0_thumb1 (bval, bdst));
+ else
+ {
+ x = gen_rtx_EQ (SImode, bdst, const0_rtx);
+ emit_insn (gen_rtx_SET (bval, x));
+ }
}
/* Split a compare and swap pattern. It is IMPLEMENTATION DEFINED whether
another memory store between the load-exclusive and store-exclusive can
reset the monitor from Exclusive to Open state. This means we must wait
until after reload to split the pattern, lest we get a register spill in
- the middle of the atomic sequence. */
+ the middle of the atomic sequence. Success of the compare and swap is
+ indicated by the Z flag being set for 32-bit targets and by neg_bval
+ being zero for Thumb-1 targets (i.e. the negation of the boolean value
+ returned in operand 0 of the atomic_compare_and_swap<mode> standard
+ pattern). */
void
arm_split_compare_and_swap (rtx operands[])
{
- rtx rval, mem, oldval, newval, scratch;
+ rtx rval, mem, oldval, newval, neg_bval;
machine_mode mode;
enum memmodel mod_s, mod_f;
bool is_weak;
rtx_code_label *label1, *label2;
rtx x, cond;
- rval = operands[0];
- mem = operands[1];
- oldval = operands[2];
- newval = operands[3];
- is_weak = (operands[4] != const0_rtx);
- mod_s = memmodel_from_int (INTVAL (operands[5]));
- mod_f = memmodel_from_int (INTVAL (operands[6]));
- scratch = operands[7];
+ rval = operands[1];
+ mem = operands[2];
+ oldval = operands[3];
+ newval = operands[4];
+ is_weak = (operands[5] != const0_rtx);
+ mod_s = memmodel_from_int (INTVAL (operands[6]));
+ mod_f = memmodel_from_int (INTVAL (operands[7]));
+ neg_bval = TARGET_THUMB1 ? operands[0] : operands[8];
mode = GET_MODE (mem);
bool is_armv8_sync = arm_arch8 && is_mm_sync (mod_s);
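
(Aside: on the Thumb-1 path the expander finishes with
gen_cstoresi_eq0_thumb1, which materialises (x == 0) without reading the
flags register, via what appears to be the classic negs/adcs carry idiom.
A C sketch of that arithmetic, illustrative only:)

    /* "negs t, x" sets C = (x == 0) with t = -x; "adcs" then computes
       (-x) + x + C, i.e. 1 iff x == 0, 0 otherwise.  */
    unsigned int
    eq0_sketch (unsigned int x)
    {
      unsigned int t = 0u - x;    /* negs: t = -x */
      unsigned int c = (x == 0);  /* the carry negs produces */
      return t + x + c;           /* adcs: the (x == 0) boolean */
    }
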
@@ -28244,26 +28255,44 @@ arm_split_compare_and_swap (rtx operands[])
arm_emit_load_exclusive (mode, rval, mem, use_acquire);
- cond = arm_gen_compare_reg (NE, rval, oldval, scratch);
- x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
- x = gen_rtx_IF_THEN_ELSE (VOIDmode, x,
- gen_rtx_LABEL_REF (Pmode, label2), pc_rtx);
- emit_unlikely_jump (gen_rtx_SET (pc_rtx, x));
+ /* Z is set to 0 for 32-bit targets (resp. neg_bval set to 1 for Thumb-1)
+ if oldval != rval, as required to communicate with
+ arm_expand_compare_and_swap. */
+ if (TARGET_32BIT)
+ {
+ cond = arm_gen_compare_reg (NE, rval, oldval, neg_bval);
+ x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
+ x = gen_rtx_IF_THEN_ELSE (VOIDmode, x,
+ gen_rtx_LABEL_REF (Pmode, label2), pc_rtx);
+ emit_unlikely_jump (gen_rtx_SET (pc_rtx, x));
+ }
+ else
+ {
+ emit_move_insn (neg_bval, const1_rtx);
+ cond = gen_rtx_NE (VOIDmode, rval, oldval);
+ if (thumb1_cmpneg_operand (oldval, SImode))
+ emit_unlikely_jump (gen_cbranchsi4_scratch (neg_bval, rval, oldval,
+ label2, cond));
+ else
+ emit_unlikely_jump (gen_cbranchsi4_insn (cond, rval, oldval, label2));
+ }
- arm_emit_store_exclusive (mode, scratch, mem, newval, use_release);
+ arm_emit_store_exclusive (mode, neg_bval, mem, newval, use_release);
/* Weak or strong, we want EQ to be true for success, so that we
match the flags that we got from the compare above. */
- cond = gen_rtx_REG (CCmode, CC_REGNUM);
- x = gen_rtx_COMPARE (CCmode, scratch, const0_rtx);
- emit_insn (gen_rtx_SET (cond, x));
+ if (TARGET_32BIT)
+ {
+ cond = gen_rtx_REG (CCmode, CC_REGNUM);
+ x = gen_rtx_COMPARE (CCmode, neg_bval, const0_rtx);
+ emit_insn (gen_rtx_SET (cond, x));
+ }
if (!is_weak)
{
- x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
- x = gen_rtx_IF_THEN_ELSE (VOIDmode, x,
- gen_rtx_LABEL_REF (Pmode, label1), pc_rtx);
- emit_unlikely_jump (gen_rtx_SET (pc_rtx, x));
+ /* Z is set to the boolean value of !neg_bval, as required to
+ communicate with arm_expand_compare_and_swap. */
+ x = gen_rtx_NE (VOIDmode, neg_bval, const0_rtx);
+ emit_unlikely_jump (gen_cbranchsi4 (x, neg_bval, const0_rtx, label1));
}
if (!is_mm_relaxed (mod_f))
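
(Aside: a control-flow sketch of the sequence the splitter now emits for a
strong CAS on Thumb-1. ldrex_si/strex_si are hypothetical stand-ins for what
arm_emit_load_exclusive/arm_emit_store_exclusive generate; a lost
reservation cannot be modelled in plain C, and barriers are omitted.)

    /* Stand-ins so the sketch compiles; real code uses ldrex/strex.  */
    static int ldrex_si (int *p) { return *p; }
    static int strex_si (int v, int *p) { *p = v; return 0; }

    /* neg_bval carries failure: 0 means success.  */
    static int
    cas_shape (int *mem, int oldval, int newval)
    {
      int rval, neg_bval;
    label1:
      rval = ldrex_si (mem);              /* load-exclusive */
      neg_bval = 1;                       /* assume failure */
      if (rval != oldval)                 /* cbranchsi4_insn/_scratch */
        goto label2;
      neg_bval = strex_si (newval, mem);  /* 0 on success */
      if (neg_bval != 0)                  /* !is_weak: retry the loop */
        goto label1;
    label2:
      return neg_bval;   /* the expander then inverts this into bval */
    }
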
@@ -396,6 +396,12 @@
|| mode == CC_DGTUmode));
})
+;; Any register, including CC
+(define_predicate "cc_register_operand"
+ (and (match_code "reg")
+ (ior (match_operand 0 "s_register_operand")
+ (match_operand 0 "cc_register"))))
+
(define_special_predicate "arm_extendqisi_mem_op"
(and (match_operand 0 "memory_operand")
(match_test "TARGET_ARM ? arm_legitimate_address_outer_p (mode,
@@ -190,20 +190,20 @@
})
(define_insn_and_split "atomic_compare_and_swap<mode>_1"
- [(set (reg:CC_Z CC_REGNUM) ;; bool out
+ [(set (match_operand 0 "cc_register_operand" "=&c") ;; bool out
(unspec_volatile:CC_Z [(const_int 0)] VUNSPEC_ATOMIC_CAS))
- (set (match_operand:SI 0 "s_register_operand" "=&r") ;; val out
+ (set (match_operand:SI 1 "s_register_operand" "=&r") ;; val out
(zero_extend:SI
- (match_operand:NARROW 1 "mem_noofs_operand" "+Ua"))) ;; memory
- (set (match_dup 1)
+ (match_operand:NARROW 2 "mem_noofs_operand" "+Ua"))) ;; memory
+ (set (match_dup 2)
(unspec_volatile:NARROW
- [(match_operand:SI 2 "arm_add_operand" "rIL") ;; expected
- (match_operand:NARROW 3 "s_register_operand" "r") ;; desired
- (match_operand:SI 4 "const_int_operand") ;; is_weak
- (match_operand:SI 5 "const_int_operand") ;; mod_s
- (match_operand:SI 6 "const_int_operand")] ;; mod_f
+ [(match_operand:SI 3 "arm_add_operand" "rIL") ;; expected
+ (match_operand:NARROW 4 "s_register_operand" "r") ;; desired
+ (match_operand:SI 5 "const_int_operand") ;; is_weak
+ (match_operand:SI 6 "const_int_operand") ;; mod_s
+ (match_operand:SI 7 "const_int_operand")] ;; mod_f
VUNSPEC_ATOMIC_CAS))
- (clobber (match_scratch:SI 7 "=&r"))]
+ (clobber (match_scratch:SI 8 "=&r"))]
"<sync_predtab>"
"#"
"&& reload_completed"
@@ -219,19 +219,19 @@
[(SI "rIL") (DI "rDi")])
(define_insn_and_split "atomic_compare_and_swap<mode>_1"
- [(set (reg:CC_Z CC_REGNUM) ;; bool out
+ [(set (match_operand 0 "cc_register_operand" "=&c") ;; bool out
(unspec_volatile:CC_Z [(const_int 0)] VUNSPEC_ATOMIC_CAS))
- (set (match_operand:SIDI 0 "s_register_operand" "=&r") ;; val out
- (match_operand:SIDI 1 "mem_noofs_operand" "+Ua")) ;; memory
- (set (match_dup 1)
+ (set (match_operand:SIDI 1 "s_register_operand" "=&r") ;; val out
+ (match_operand:SIDI 2 "mem_noofs_operand" "+Ua")) ;; memory
+ (set (match_dup 2)
(unspec_volatile:SIDI
- [(match_operand:SIDI 2 "<cas_cmp_operand>" "<cas_cmp_str>") ;; expect
- (match_operand:SIDI 3 "s_register_operand" "r") ;; desired
- (match_operand:SI 4 "const_int_operand") ;; is_weak
- (match_operand:SI 5 "const_int_operand") ;; mod_s
- (match_operand:SI 6 "const_int_operand")] ;; mod_f
+ [(match_operand:SIDI 3 "<cas_cmp_operand>" "<cas_cmp_str>") ;; expect
+ (match_operand:SIDI 4 "s_register_operand" "r") ;; desired
+ (match_operand:SI 5 "const_int_operand") ;; is_weak
+ (match_operand:SI 6 "const_int_operand") ;; mod_s
+ (match_operand:SI 7 "const_int_operand")] ;; mod_f
VUNSPEC_ATOMIC_CAS))
- (clobber (match_scratch:SI 7 "=&r"))]
+ (clobber (match_scratch:SI 8 "=&r"))]
"<sync_predtab>"
"#"
"&& reload_completed"