@@ -3989,7 +3989,8 @@ (define_expand "while_ultsidi"
(define_expand "maskload<mode>di"
[(match_operand:V_MOV 0 "register_operand")
(match_operand:V_MOV 1 "memory_operand")
- (match_operand 2 "")]
+ (match_operand 2 "")
+ (match_operand:V_MOV 3 "maskload_else_operand")]
""
{
rtx exec = force_reg (DImode, operands[2]);
@@ -3998,9 +3999,6 @@ (define_expand "maskload<mode>di"
rtx as = gen_rtx_CONST_INT (VOIDmode, MEM_ADDR_SPACE (operands[1]));
rtx v = gen_rtx_CONST_INT (VOIDmode, MEM_VOLATILE_P (operands[1]));
 
- /* Masked lanes are required to hold zero. */
- emit_move_insn (operands[0], gcn_vec_constant (<MODE>mode, 0));
-
emit_insn (gen_gather<mode>_expr_exec (operands[0], addr, as, v,
operands[0], exec));
DONE;
@@ -4027,7 +4025,8 @@ (define_expand "mask_gather_load<mode><vnsi>"
(match_operand:<VnSI> 2 "register_operand")
(match_operand 3 "immediate_operand")
(match_operand:SI 4 "gcn_alu_operand")
- (match_operand:DI 5 "")]
+ (match_operand:DI 5 "")
+ (match_operand:V_MOV 6 "maskload_else_operand")]
""
{
rtx exec = force_reg (DImode, operands[5]);
@@ -4036,9 +4035,6 @@ (define_expand "mask_gather_load<mode><vnsi>"
operands[2], operands[4],
INTVAL (operands[3]), exec);
 
- /* Masked lanes are required to hold zero. */
- emit_move_insn (operands[0], gcn_vec_constant (<MODE>mode, 0));
-
if (GET_MODE (addr) == <VnDI>mode)
emit_insn (gen_gather<mode>_insn_1offset_exec (operands[0], addr,
const0_rtx, const0_rtx,
@@ -228,3 +228,5 @@ (define_predicate "ascending_zero_int_parallel"
return gcn_stepped_zero_int_parallel_p (op, 1);
})
 
+(define_predicate "maskload_else_operand"
+ (match_operand 0 "scratch_operand"))