@@ -23,6 +23,9 @@
#define GCC_AARCH64_OPTS_H
#ifndef USED_FOR_TARGET
+/* A bitmask of the ISA modes defined in aarch64-isa-modes.def.  */
+typedef uint64_t aarch64_isa_mode;
+
typedef uint64_t aarch64_feature_flags;
constexpr unsigned int AARCH64_NUM_ISA_MODES = (0
@@ -767,7 +767,7 @@ bool aarch64_constant_address_p (rtx);
bool aarch64_emit_approx_div (rtx, rtx, rtx);
bool aarch64_emit_approx_sqrt (rtx, rtx, bool);
tree aarch64_vector_load_decl (tree);
-rtx aarch64_gen_callee_cookie (aarch64_feature_flags, arm_pcs);
+rtx aarch64_gen_callee_cookie (aarch64_isa_mode, arm_pcs);
void aarch64_expand_call (rtx, rtx, rtx, bool);
bool aarch64_expand_cpymem_mops (rtx *, bool);
bool aarch64_expand_cpymem (rtx *, bool);
@@ -808,7 +808,7 @@ int aarch64_add_offset_temporaries (rtx);
void aarch64_split_add_offset (scalar_int_mode, rtx, rtx, rtx, rtx, rtx);
bool aarch64_rdsvl_immediate_p (const_rtx);
rtx aarch64_sme_vq_immediate (machine_mode mode, HOST_WIDE_INT,
- aarch64_feature_flags);
+ aarch64_isa_mode);
char *aarch64_output_rdsvl (const_rtx);
bool aarch64_addsvl_addspl_immediate_p (const_rtx);
char *aarch64_output_addsvl_addspl (rtx);
@@ -181,7 +181,17 @@ enum class aarch64_feature : unsigned char {
#include "aarch64-arches.def"
#undef HANDLE
-constexpr auto AARCH64_FL_SM_STATE = AARCH64_FL_SM_ON | AARCH64_FL_SM_OFF;
+/* Define aarch64_isa_mode masks. */
+#define DEF_AARCH64_ISA_MODE(IDENT) \
+ constexpr auto AARCH64_ISA_MODE_##IDENT ATTRIBUTE_UNUSED \
+ = aarch64_isa_mode (1) << int (aarch64_feature::IDENT);
+#include "aarch64-isa-modes.def"
+#undef DEF_AARCH64_ISA_MODE
+
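[Illustration, not part of the patch: assuming aarch64-isa-modes.def lists
its entries as DEF_AARCH64_ISA_MODE (SM_ON), DEF_AARCH64_ISA_MODE (SM_OFF)
and so on, the block above expands to definitions of the form

    constexpr auto AARCH64_ISA_MODE_SM_ON ATTRIBUTE_UNUSED
      = aarch64_isa_mode (1) << int (aarch64_feature::SM_ON);

so each AARCH64_ISA_MODE_* constant is meant to occupy the same bit position
as the corresponding AARCH64_FL_* feature flag, only in the aarch64_isa_mode
type rather than in aarch64_feature_flags.]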
+constexpr auto AARCH64_FL_SM_STATE ATTRIBUTE_UNUSED
+ = AARCH64_FL_SM_ON | AARCH64_FL_SM_OFF;
+constexpr auto AARCH64_ISA_MODE_SM_STATE ATTRIBUTE_UNUSED
+ = AARCH64_ISA_MODE_SM_ON | AARCH64_ISA_MODE_SM_OFF;
/* The mask of all ISA modes. */
constexpr auto AARCH64_FL_ISA_MODES
@@ -189,7 +199,10 @@ constexpr auto AARCH64_FL_ISA_MODES
/* The default ISA mode, for functions with no attributes that specify
something to the contrary. */
-constexpr auto AARCH64_FL_DEFAULT_ISA_MODE = AARCH64_FL_SM_OFF;
+constexpr auto AARCH64_DEFAULT_ISA_MODE ATTRIBUTE_UNUSED
+ = AARCH64_ISA_MODE_SM_OFF;
+constexpr auto AARCH64_FL_DEFAULT_ISA_MODE ATTRIBUTE_UNUSED
+ = aarch64_feature_flags (AARCH64_DEFAULT_ISA_MODE);
#endif
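[Illustration, not part of the patch: the two representations convert into
each other with a plain widening cast in one direction and a mask plus cast
in the other, as AARCH64_FL_DEFAULT_ISA_MODE above and the AARCH64_ISA_MODE
macro below rely on. Hypothetical helpers with invented names:

    constexpr aarch64_feature_flags
    example_feature_flags_from_isa_mode (aarch64_isa_mode mode)
    {
      /* Every ISA-mode bit has the same position in both types.  */
      return aarch64_feature_flags (mode);
    }

    constexpr aarch64_isa_mode
    example_isa_mode_from_feature_flags (aarch64_feature_flags flags)
    {
      /* Drop everything except the ISA-mode bits before converting.  */
      return aarch64_isa_mode (flags & AARCH64_FL_ISA_MODES);
    }

Both helpers only restate conversions that the patch open-codes.]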
@@ -202,7 +215,8 @@ constexpr auto AARCH64_FL_DEFAULT_ISA_MODE = AARCH64_FL_SM_OFF
#define AARCH64_ISA_SM_OFF (aarch64_isa_flags & AARCH64_FL_SM_OFF)
#define AARCH64_ISA_SM_ON (aarch64_isa_flags & AARCH64_FL_SM_ON)
#define AARCH64_ISA_ZA_ON (aarch64_isa_flags & AARCH64_FL_ZA_ON)
-#define AARCH64_ISA_MODE (aarch64_isa_flags & AARCH64_FL_ISA_MODES)
+#define AARCH64_ISA_MODE \
+  ((aarch64_isa_mode) (aarch64_isa_flags & AARCH64_FL_ISA_MODES))
#define AARCH64_ISA_V8A (aarch64_isa_flags & AARCH64_FL_V8A)
#define AARCH64_ISA_V8_1A (aarch64_isa_flags & AARCH64_FL_V8_1A)
#define AARCH64_ISA_CRC (aarch64_isa_flags & AARCH64_FL_CRC)
@@ -1131,7 +1145,7 @@ enum arm_pcs
typedef struct
{
enum arm_pcs pcs_variant;
- aarch64_feature_flags isa_mode;
+ aarch64_isa_mode isa_mode;
int aapcs_arg_processed; /* No need to lay out this argument again. */
int aapcs_ncrn; /* Next Core register number. */
int aapcs_nextncrn; /* Next next core register number. */
@@ -2166,17 +2166,17 @@ aarch64_fntype_abi (const_tree fntype)
/* Return the state of PSTATE.SM on entry to functions of type FNTYPE. */
-static aarch64_feature_flags
+static aarch64_isa_mode
aarch64_fntype_pstate_sm (const_tree fntype)
{
if (lookup_attribute ("arm", "streaming", TYPE_ATTRIBUTES (fntype)))
- return AARCH64_FL_SM_ON;
+ return AARCH64_ISA_MODE_SM_ON;
if (lookup_attribute ("arm", "streaming_compatible",
TYPE_ATTRIBUTES (fntype)))
return 0;
- return AARCH64_FL_SM_OFF;
+ return AARCH64_ISA_MODE_SM_OFF;
}
/* Return state flags that describe whether and how functions of type
@@ -2191,19 +2191,19 @@ aarch64_fntype_shared_flags (const_tree fntype, const char *state_name)
/* Return the state of PSTATE.ZA on entry to functions of type FNTYPE. */
-static aarch64_feature_flags
+static aarch64_isa_mode
aarch64_fntype_pstate_za (const_tree fntype)
{
if (aarch64_fntype_shared_flags (fntype, "za")
|| aarch64_fntype_shared_flags (fntype, "zt0"))
- return AARCH64_FL_ZA_ON;
+ return AARCH64_ISA_MODE_ZA_ON;
return 0;
}
/* Return the ISA mode on entry to functions of type FNTYPE. */
-static aarch64_feature_flags
+static aarch64_isa_mode
aarch64_fntype_isa_mode (const_tree fntype)
{
return (aarch64_fntype_pstate_sm (fntype)
@@ -2224,11 +2224,11 @@ aarch64_fndecl_is_locally_streaming (const_tree fndecl)
function FNDECL. This might be different from the state of
PSTATE.SM on entry. */
-static aarch64_feature_flags
+static aarch64_isa_mode
aarch64_fndecl_pstate_sm (const_tree fndecl)
{
if (aarch64_fndecl_is_locally_streaming (fndecl))
- return AARCH64_FL_SM_ON;
+ return AARCH64_ISA_MODE_SM_ON;
return aarch64_fntype_pstate_sm (TREE_TYPE (fndecl));
}
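[Illustration, not part of the patch: assuming the usual ACLE keyword
spellings for the "arm" attributes checked above, the mapping is

    void f () __arm_streaming;             /* SM_ON on entry and in the body */
    void g () __arm_streaming_compatible;  /* 0: the caller's mode is kept   */
    __arm_locally_streaming void h ();     /* SM_OFF on entry, SM_ON in body */
    void k ();                             /* SM_OFF on entry and in body    */

where SM_ON and SM_OFF stand for AARCH64_ISA_MODE_SM_ON and
AARCH64_ISA_MODE_SM_OFF.]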
@@ -2247,12 +2247,12 @@ aarch64_fndecl_has_state (tree fndecl, const char *state_name)
/* Return the state of PSTATE.ZA when compiling the body of function FNDECL.
This might be different from the state of PSTATE.ZA on entry. */
-static aarch64_feature_flags
+static aarch64_isa_mode
aarch64_fndecl_pstate_za (const_tree fndecl)
{
if (aarch64_fndecl_has_new_state (fndecl, "za")
|| aarch64_fndecl_has_new_state (fndecl, "zt0"))
- return AARCH64_FL_ZA_ON;
+ return AARCH64_ISA_MODE_ZA_ON;
return aarch64_fntype_pstate_za (TREE_TYPE (fndecl));
}
@@ -2260,7 +2260,7 @@ aarch64_fndecl_pstate_za (const_tree fndecl)
/* Return the ISA mode that should be used to compile the body of
function FNDECL. */
-static aarch64_feature_flags
+static aarch64_isa_mode
aarch64_fndecl_isa_mode (const_tree fndecl)
{
return (aarch64_fndecl_pstate_sm (fndecl)
@@ -2271,7 +2271,7 @@ aarch64_fndecl_isa_mode (const_tree fndecl)
This might be different from the state of PSTATE.SM in the function
body. */
-static aarch64_feature_flags
+static aarch64_isa_mode
aarch64_cfun_incoming_pstate_sm ()
{
return aarch64_fntype_pstate_sm (TREE_TYPE (cfun->decl));
@@ -2281,7 +2281,7 @@ aarch64_cfun_incoming_pstate_sm ()
This might be different from the state of PSTATE.ZA in the function
body. */
-static aarch64_feature_flags
+static aarch64_isa_mode
aarch64_cfun_incoming_pstate_za ()
{
return aarch64_fntype_pstate_za (TREE_TYPE (cfun->decl));
@@ -2313,7 +2313,7 @@ static bool
aarch64_cfun_enables_pstate_sm ()
{
return (aarch64_fndecl_is_locally_streaming (cfun->decl)
- && aarch64_cfun_incoming_pstate_sm () != AARCH64_FL_SM_ON);
+ && aarch64_cfun_incoming_pstate_sm () != AARCH64_ISA_MODE_SM_ON);
}
/* Return true if the current function has state STATE_NAME, either by
@@ -2330,9 +2330,9 @@ aarch64_cfun_has_state (const char *state_name)
the BL instruction. */
static bool
-aarch64_call_switches_pstate_sm (aarch64_feature_flags callee_mode)
+aarch64_call_switches_pstate_sm (aarch64_isa_mode callee_mode)
{
- return (callee_mode & ~AARCH64_ISA_MODE & AARCH64_FL_SM_STATE) != 0;
+  return (callee_mode & ~AARCH64_ISA_MODE & AARCH64_ISA_MODE_SM_STATE) != 0;
}
/* Implement TARGET_COMPATIBLE_VECTOR_TYPES_P. */
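[Illustration, not part of the patch: the same test with the current
function's mode made an explicit parameter, using an invented helper name:

    static bool
    example_needs_sm_switch (aarch64_isa_mode caller_mode,
                             aarch64_isa_mode callee_mode)
    {
      /* True iff the callee requires a PSTATE.SM setting that the
         caller does not already guarantee.  */
      return (callee_mode & ~caller_mode & AARCH64_ISA_MODE_SM_STATE) != 0;
    }

With caller_mode equal to AARCH64_ISA_MODE_SM_OFF this is true for a callee
mode of AARCH64_ISA_MODE_SM_ON, and false both for AARCH64_ISA_MODE_SM_OFF
and for a streaming-compatible callee, whose mode has neither SM bit set.]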
@@ -2401,7 +2401,7 @@ aarch64_reg_save_mode (unsigned int regno)
return the CONST_INT that should be placed in an UNSPEC_CALLEE_ABI rtx. */
rtx
-aarch64_gen_callee_cookie (aarch64_feature_flags isa_mode, arm_pcs pcs_variant)
+aarch64_gen_callee_cookie (aarch64_isa_mode isa_mode, arm_pcs pcs_variant)
{
return gen_int_mode ((unsigned int) isa_mode
| (unsigned int) pcs_variant << AARCH64_NUM_ISA_MODES,
@@ -2421,10 +2421,10 @@ aarch64_callee_abi (rtx cookie)
required ISA mode on entry to the callee, which is also the ISA
mode on return from the callee. */
-static aarch64_feature_flags
+static aarch64_isa_mode
aarch64_callee_isa_mode (rtx cookie)
{
- return UINTVAL (cookie) & AARCH64_FL_ISA_MODES;
+ return UINTVAL (cookie) & ((1 << AARCH64_NUM_ISA_MODES) - 1);
}
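[Illustration, not part of the patch: the cookie keeps the callee's ISA mode
in the low AARCH64_NUM_ISA_MODES bits and the PCS variant in the bits above
them, which is why the mask here can be (1 << AARCH64_NUM_ISA_MODES) - 1
rather than AARCH64_FL_ISA_MODES. A hypothetical round trip, with an
invented helper name:

    static void
    example_cookie_round_trip (aarch64_isa_mode isa_mode, arm_pcs pcs_variant)
    {
      rtx cookie = aarch64_gen_callee_cookie (isa_mode, pcs_variant);
      /* The low bits recover the ISA mode...  */
      gcc_assert ((UINTVAL (cookie) & ((1 << AARCH64_NUM_ISA_MODES) - 1))
                  == isa_mode);
      /* ...and the bits above them recover the PCS variant.  */
      gcc_assert ((arm_pcs) (UINTVAL (cookie) >> AARCH64_NUM_ISA_MODES)
                  == pcs_variant);
    }

This relies on the ISA modes being the first entries of the aarch64_feature
enum, so that every aarch64_isa_mode value fits in those low bits.]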
/* INSN is a call instruction. Return the CONST_INT stored in its
@@ -2452,7 +2452,7 @@ aarch64_insn_callee_abi (const rtx_insn *insn)
/* INSN is a call instruction. Return the required ISA mode on entry to
the callee, which is also the ISA mode on return from the callee. */
-static aarch64_feature_flags
+static aarch64_isa_mode
aarch64_insn_callee_isa_mode (const rtx_insn *insn)
{
return aarch64_callee_isa_mode (aarch64_insn_callee_cookie (insn));
@@ -3986,10 +3986,10 @@ aarch64_output_sve_vector_inc_dec (const char *operands, rtx x)
rtx
aarch64_sme_vq_immediate (machine_mode mode, HOST_WIDE_INT factor,
- aarch64_feature_flags isa_mode)
+ aarch64_isa_mode isa_mode)
{
gcc_assert (aarch64_sve_rdvl_addvl_factor_p (factor));
- if (isa_mode & AARCH64_FL_SM_ON)
+ if (isa_mode & AARCH64_ISA_MODE_SM_ON)
/* We're in streaming mode, so we can use normal poly-int values. */
return gen_int_mode ({ factor, factor }, mode);
@@ -4640,7 +4640,7 @@ aarch64_add_offset_temporaries (rtx x)
TEMP2, if nonnull, is a second temporary register that doesn't
overlap either DEST or REG.
- FORCE_ISA_MODE is AARCH64_FL_SM_ON if any variable component of OFFSET
+ FORCE_ISA_MODE is AARCH64_ISA_MODE_SM_ON if any variable component of OFFSET
is measured relative to the SME vector length instead of the current
prevailing vector length. It is 0 otherwise.
@@ -4652,7 +4652,7 @@ aarch64_add_offset_temporaries (rtx x)
static void
aarch64_add_offset (scalar_int_mode mode, rtx dest, rtx src,
poly_int64 offset, rtx temp1, rtx temp2,
- aarch64_feature_flags force_isa_mode,
+ aarch64_isa_mode force_isa_mode,
bool frame_related_p, bool emit_move_imm = true)
{
gcc_assert (emit_move_imm || temp1 != NULL_RTX);
@@ -4673,7 +4673,7 @@ aarch64_add_offset (scalar_int_mode mode, rtx dest, rtx src,
offset_rtx = aarch64_sme_vq_immediate (mode, offset.coeffs[0], 0);
rtx_insn *insn = emit_insn (gen_add3_insn (dest, src, offset_rtx));
RTX_FRAME_RELATED_P (insn) = frame_related_p;
- if (frame_related_p && (force_isa_mode & AARCH64_FL_SM_ON))
+ if (frame_related_p && (force_isa_mode & AARCH64_ISA_MODE_SM_ON))
add_reg_note (insn, REG_CFA_ADJUST_CFA,
gen_rtx_SET (dest, plus_constant (Pmode, src,
offset)));
@@ -4701,7 +4701,7 @@ aarch64_add_offset (scalar_int_mode mode, rtx dest, rtx src,
{
rtx_insn *insn = emit_insn (gen_add3_insn (dest, src, offset_rtx));
RTX_FRAME_RELATED_P (insn) = true;
- if (force_isa_mode & AARCH64_FL_SM_ON)
+ if (force_isa_mode & AARCH64_ISA_MODE_SM_ON)
add_reg_note (insn, REG_CFA_ADJUST_CFA,
gen_rtx_SET (dest, plus_constant (Pmode, src,
poly_offset)));
@@ -4735,7 +4735,7 @@ aarch64_add_offset (scalar_int_mode mode, rtx dest, rtx src,
rtx val;
if (IN_RANGE (rel_factor, -32, 31))
{
- if (force_isa_mode & AARCH64_FL_SM_ON)
+ if (force_isa_mode & AARCH64_ISA_MODE_SM_ON)
{
/* Try to use an unshifted RDSVL, otherwise fall back on
a shifted RDSVL #1. */
@@ -4783,7 +4783,7 @@ aarch64_add_offset (scalar_int_mode mode, rtx dest, rtx src,
val = gen_int_mode (poly_int64 (low_bit, low_bit), mode);
shift = 0;
}
- else if ((force_isa_mode & AARCH64_FL_SM_ON)
+ else if ((force_isa_mode & AARCH64_ISA_MODE_SM_ON)
&& aarch64_sve_rdvl_addvl_factor_p (low_bit))
{
val = aarch64_sme_vq_immediate (mode, low_bit, 0);
@@ -4886,7 +4886,7 @@ aarch64_split_add_offset (scalar_int_mode mode, rtx dest, rtx src,
static inline void
aarch64_add_sp (rtx temp1, rtx temp2, poly_int64 delta,
- aarch64_feature_flags force_isa_mode, bool emit_move_imm)
+ aarch64_isa_mode force_isa_mode, bool emit_move_imm)
{
aarch64_add_offset (Pmode, stack_pointer_rtx, stack_pointer_rtx, delta,
temp1, temp2, force_isa_mode, true, emit_move_imm);
@@ -4898,7 +4898,7 @@ aarch64_add_sp (rtx temp1, rtx temp2, poly_int64 delta,
static inline void
aarch64_sub_sp (rtx temp1, rtx temp2, poly_int64 delta,
- aarch64_feature_flags force_isa_mode,
+ aarch64_isa_mode force_isa_mode,
bool frame_related_p, bool emit_move_imm = true)
{
aarch64_add_offset (Pmode, stack_pointer_rtx, stack_pointer_rtx, -delta,
@@ -4915,11 +4915,11 @@ aarch64_sub_sp (rtx temp1, rtx temp2, poly_int64 delta,
matches LOCAL_MODE. Return the label that the branch jumps to. */
static rtx_insn *
-aarch64_guard_switch_pstate_sm (rtx old_svcr, aarch64_feature_flags local_mode)
+aarch64_guard_switch_pstate_sm (rtx old_svcr, aarch64_isa_mode local_mode)
{
- local_mode &= AARCH64_FL_SM_STATE;
+ local_mode &= AARCH64_ISA_MODE_SM_STATE;
gcc_assert (local_mode != 0);
- auto already_ok_cond = (local_mode & AARCH64_FL_SM_ON ? NE : EQ);
+ auto already_ok_cond = (local_mode & AARCH64_ISA_MODE_SM_ON ? NE : EQ);
auto *label = gen_label_rtx ();
auto branch = aarch64_gen_test_and_branch (already_ok_cond, old_svcr, 0,
label);
@@ -4933,15 +4933,14 @@ aarch64_guard_switch_pstate_sm (rtx old_svcr, aarch64_feature_flags local_mode)
an SMSTOP SM. */
static void
-aarch64_switch_pstate_sm (aarch64_feature_flags old_mode,
- aarch64_feature_flags new_mode)
+aarch64_switch_pstate_sm (aarch64_isa_mode old_mode, aarch64_isa_mode new_mode)
{
- old_mode &= AARCH64_FL_SM_STATE;
- new_mode &= AARCH64_FL_SM_STATE;
+ old_mode &= AARCH64_ISA_MODE_SM_STATE;
+ new_mode &= AARCH64_ISA_MODE_SM_STATE;
gcc_assert (old_mode != new_mode);
- if ((new_mode & AARCH64_FL_SM_ON)
- || (new_mode == 0 && (old_mode & AARCH64_FL_SM_OFF)))
+ if ((new_mode & AARCH64_ISA_MODE_SM_ON)
+ || (!new_mode && (old_mode & AARCH64_ISA_MODE_SM_OFF)))
emit_insn (gen_aarch64_smstart_sm ());
else
emit_insn (gen_aarch64_smstop_sm ());
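[Illustration, not part of the patch: with 0 standing for the
streaming-compatible state, in which neither SM bit is fixed, the choice
above can be read off from an invented helper:

    static const char *
    example_sm_switch_insn (aarch64_isa_mode old_mode,
                            aarch64_isa_mode new_mode)
    {
      old_mode &= AARCH64_ISA_MODE_SM_STATE;
      new_mode &= AARCH64_ISA_MODE_SM_STATE;
      gcc_assert (old_mode != new_mode);
      if ((new_mode & AARCH64_ISA_MODE_SM_ON)
          || (!new_mode && (old_mode & AARCH64_ISA_MODE_SM_OFF)))
        return "smstart sm";  /* enter streaming mode */
      return "smstop sm";     /* leave streaming mode */
    }

So the calls added later in this patch, aarch64_switch_pstate_sm
(AARCH64_ISA_MODE_SM_OFF, AARCH64_ISA_MODE_SM_ON) and the reverse, emit
SMSTART SM and SMSTOP SM respectively.]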
@@ -7222,7 +7221,7 @@ aarch64_init_cumulative_args (CUMULATIVE_ARGS *pcum,
else
{
pcum->pcs_variant = ARM_PCS_AAPCS64;
- pcum->isa_mode = AARCH64_FL_DEFAULT_ISA_MODE;
+ pcum->isa_mode = AARCH64_DEFAULT_ISA_MODE;
}
pcum->aapcs_reg = NULL_RTX;
pcum->aapcs_arg_processed = false;
@@ -9157,14 +9156,14 @@ aarch64_emit_stack_tie (rtx reg)
then the signal handler doesn't know the state of the stack and can make no
assumptions about which pages have been probed.
-   FORCE_ISA_MODE is AARCH64_FL_SM_ON if any variable component of POLY_SIZE
-   is measured relative to the SME vector length instead of the current
-   prevailing vector length. It is 0 otherwise. */
+   FORCE_ISA_MODE is AARCH64_ISA_MODE_SM_ON if any variable component of
+   POLY_SIZE is measured relative to the SME vector length instead of the
+   current prevailing vector length. It is 0 otherwise. */
static void
aarch64_allocate_and_probe_stack_space (rtx temp1, rtx temp2,
poly_int64 poly_size,
- aarch64_feature_flags force_isa_mode,
+ aarch64_isa_mode force_isa_mode,
bool frame_related_p,
bool final_adjustment_p)
{
@@ -9477,7 +9476,7 @@ aarch64_read_old_svcr (unsigned int regno)
static rtx_insn *
aarch64_guard_switch_pstate_sm (unsigned int regno,
- aarch64_feature_flags local_mode)
+ aarch64_isa_mode local_mode)
{
rtx old_svcr = aarch64_read_old_svcr (regno);
return aarch64_guard_switch_pstate_sm (old_svcr, local_mode);
@@ -9578,9 +9577,9 @@ aarch64_expand_prologue (void)
unsigned reg2 = frame.wb_push_candidate2;
bool emit_frame_chain = frame.emit_frame_chain;
rtx_insn *insn;
- aarch64_feature_flags force_isa_mode = 0;
+ aarch64_isa_mode force_isa_mode = 0;
if (aarch64_cfun_enables_pstate_sm ())
- force_isa_mode = AARCH64_FL_SM_ON;
+ force_isa_mode = AARCH64_ISA_MODE_SM_ON;
if (flag_stack_clash_protection
&& known_eq (callee_adjust, 0)
@@ -9784,7 +9783,7 @@ aarch64_expand_prologue (void)
emit_insn (gen_aarch64_read_svcr (svcr));
emit_move_insn (aarch64_old_svcr_mem (), svcr);
guard_label = aarch64_guard_switch_pstate_sm (svcr,
- aarch64_isa_flags);
+ AARCH64_ISA_MODE);
}
aarch64_sme_mode_switch_regs args_switch;
auto &args = crtl->args.info;
@@ -9845,9 +9844,9 @@ aarch64_expand_epilogue (rtx_call_insn *sibcall)
HOST_WIDE_INT guard_size
= 1 << param_stack_clash_protection_guard_size;
HOST_WIDE_INT guard_used_by_caller = STACK_CLASH_CALLER_GUARD;
- aarch64_feature_flags force_isa_mode = 0;
+ aarch64_isa_mode force_isa_mode = 0;
if (aarch64_cfun_enables_pstate_sm ())
- force_isa_mode = AARCH64_FL_SM_ON;
+ force_isa_mode = AARCH64_ISA_MODE_SM_ON;
/* We can re-use the registers when:
@@ -9877,8 +9876,10 @@ aarch64_expand_epilogue (rtx_call_insn *sibcall)
{
rtx_insn *guard_label = nullptr;
if (known_ge (cfun->machine->frame.old_svcr_offset, 0))
- guard_label = aarch64_guard_switch_pstate_sm (IP0_REGNUM,
- aarch64_isa_flags);
+ {
+ guard_label = aarch64_guard_switch_pstate_sm (IP0_REGNUM,
+ AARCH64_ISA_MODE);
+ }
aarch64_sme_mode_switch_regs return_switch;
if (sibcall)
return_switch.add_call_args (sibcall);
@@ -11159,7 +11160,7 @@ aarch64_start_call_args (cumulative_args_t ca_v)
{
CUMULATIVE_ARGS *ca = get_cumulative_args (ca_v);
- if (!TARGET_SME && (ca->isa_mode & AARCH64_FL_SM_ON))
+ if (!TARGET_SME && (ca->isa_mode & AARCH64_ISA_MODE_SM_ON))
{
error ("calling a streaming function requires the ISA extension %qs",
"sme");
@@ -11176,20 +11177,20 @@ aarch64_start_call_args (cumulative_args_t ca_v)
&& !aarch64_cfun_has_state ("zt0"))
error ("call to a function that shares %qs state from a function"
" that has no %qs state", "zt0", "zt0");
- else if (!TARGET_ZA && (ca->isa_mode & AARCH64_FL_ZA_ON))
+ else if (!TARGET_ZA && (ca->isa_mode & AARCH64_ISA_MODE_ZA_ON))
error ("call to a function that shares SME state from a function"
" that has no SME state");
/* If this is a call to a private ZA function, emit a marker to
indicate where any necessary set-up code could be inserted.
The code itself is inserted by the mode-switching pass. */
- if (TARGET_ZA && !(ca->isa_mode & AARCH64_FL_ZA_ON))
+ if (TARGET_ZA && !(ca->isa_mode & AARCH64_ISA_MODE_ZA_ON))
emit_insn (gen_aarch64_start_private_za_call ());
/* If this is a call to a shared-ZA function that doesn't share ZT0,
save and restore ZT0 around the call. */
if (aarch64_cfun_has_state ("zt0")
- && (ca->isa_mode & AARCH64_FL_ZA_ON)
+ && (ca->isa_mode & AARCH64_ISA_MODE_ZA_ON)
&& ca->shared_zt0_flags == 0)
aarch64_save_zt0 ();
}
@@ -11232,7 +11233,7 @@ aarch64_expand_call (rtx result, rtx mem, rtx cookie, bool sibcall)
auto callee_isa_mode = aarch64_callee_isa_mode (callee_abi);
if (aarch64_cfun_has_state ("za")
- && (callee_isa_mode & AARCH64_FL_ZA_ON)
+ && (callee_isa_mode & AARCH64_ISA_MODE_ZA_ON)
&& !shared_za_flags)
{
sorry ("call to a function that shares state other than %qs"
@@ -11387,7 +11388,7 @@ aarch64_expand_call (rtx result, rtx mem, rtx cookie, bool sibcall)
gen_rtx_REG (VNx16BImode, ZA_SAVED_REGNUM));
/* Keep the aarch64_start/end_private_za_call markers live. */
- if (!(callee_isa_mode & AARCH64_FL_ZA_ON))
+ if (!(callee_isa_mode & AARCH64_ISA_MODE_ZA_ON))
use_reg (&CALL_INSN_FUNCTION_USAGE (call_insn),
gen_rtx_REG (VNx16BImode, LOWERING_REGNUM));
@@ -11413,13 +11414,13 @@ aarch64_end_call_args (cumulative_args_t ca_v)
/* If this is a call to a private ZA function, emit a marker to
indicate where any necessary restoration code could be inserted.
The code itself is inserted by the mode-switching pass. */
- if (TARGET_ZA && !(ca->isa_mode & AARCH64_FL_ZA_ON))
+ if (TARGET_ZA && !(ca->isa_mode & AARCH64_ISA_MODE_ZA_ON))
emit_insn (gen_aarch64_end_private_za_call ());
/* If this is a call to a shared-ZA function that doesn't share ZT0,
save and restore ZT0 around the call. */
if (aarch64_cfun_has_state ("zt0")
- && (ca->isa_mode & AARCH64_FL_ZA_ON)
+ && (ca->isa_mode & AARCH64_ISA_MODE_ZA_ON)
&& ca->shared_zt0_flags == 0)
aarch64_restore_zt0 (false);
}
@@ -19098,7 +19099,7 @@ aarch64_set_current_function (tree fndecl)
auto new_isa_mode = (fndecl
? aarch64_fndecl_isa_mode (fndecl)
- : AARCH64_FL_DEFAULT_ISA_MODE);
+ : AARCH64_DEFAULT_ISA_MODE);
auto isa_flags = TREE_TARGET_OPTION (new_tree)->x_aarch64_isa_flags;
static bool reported_zt0_p;
@@ -19120,7 +19121,7 @@ aarch64_set_current_function (tree fndecl)
aarch64_pragma_target_parse. */
if (old_tree == new_tree
&& (!fndecl || aarch64_previous_fndecl)
- && (isa_flags & AARCH64_FL_ISA_MODES) == new_isa_mode)
+ && (aarch64_isa_mode) (isa_flags & AARCH64_FL_ISA_MODES) == new_isa_mode)
{
gcc_assert (AARCH64_ISA_MODE == new_isa_mode);
return;
@@ -19135,10 +19136,11 @@ aarch64_set_current_function (tree fndecl)
/* The ISA mode can vary based on function type attributes and
function declaration attributes. Make sure that the target
options correctly reflect these attributes. */
- if ((isa_flags & AARCH64_FL_ISA_MODES) != new_isa_mode)
+ if ((aarch64_isa_mode) (isa_flags & AARCH64_FL_ISA_MODES) != new_isa_mode)
{
auto base_flags = (aarch64_asm_isa_flags & ~AARCH64_FL_ISA_MODES);
- aarch64_set_asm_isa_flags (base_flags | new_isa_mode);
+ aarch64_set_asm_isa_flags (base_flags
+ | (aarch64_feature_flags) new_isa_mode);
aarch64_override_options_internal (&global_options);
new_tree = build_target_option_node (&global_options,
@@ -19203,7 +19205,8 @@ aarch64_handle_attr_arch (const char *str)
{
gcc_assert (tmp_arch);
selected_arch = tmp_arch->arch;
- aarch64_set_asm_isa_flags (tmp_flags | AARCH64_ISA_MODE);
+ aarch64_set_asm_isa_flags (tmp_flags | (aarch64_asm_isa_flags
+ & AARCH64_FL_ISA_MODES));
return true;
}
@@ -19244,7 +19247,8 @@ aarch64_handle_attr_cpu (const char *str)
gcc_assert (tmp_cpu);
selected_tune = tmp_cpu->ident;
selected_arch = tmp_cpu->arch;
- aarch64_set_asm_isa_flags (tmp_flags | AARCH64_ISA_MODE);
+ aarch64_set_asm_isa_flags (tmp_flags | (aarch64_asm_isa_flags
+ & AARCH64_FL_ISA_MODES));
return true;
}
@@ -19322,7 +19326,7 @@ aarch64_handle_attr_isa_flags (char *str)
features if the user wants to handpick specific features. */
if (strncmp ("+nothing", str, 8) == 0)
{
- isa_flags = AARCH64_ISA_MODE;
+ isa_flags &= AARCH64_FL_ISA_MODES;
str += 8;
}
@@ -30036,11 +30040,11 @@ aarch64_switch_pstate_sm_for_landing_pad (basic_block bb)
rtx_insn *guard_label = nullptr;
if (TARGET_STREAMING_COMPATIBLE)
guard_label = aarch64_guard_switch_pstate_sm (IP0_REGNUM,
- AARCH64_FL_SM_OFF);
+ AARCH64_ISA_MODE_SM_OFF);
aarch64_sme_mode_switch_regs args_switch;
args_switch.add_call_preserved_regs (df_get_live_in (bb));
args_switch.emit_prologue ();
- aarch64_switch_pstate_sm (AARCH64_FL_SM_OFF, AARCH64_FL_SM_ON);
+ aarch64_switch_pstate_sm (AARCH64_ISA_MODE_SM_OFF, AARCH64_ISA_MODE_SM_ON);
args_switch.emit_epilogue ();
if (guard_label)
emit_label (guard_label);
@@ -30064,8 +30068,8 @@ aarch64_switch_pstate_sm_for_jump (rtx_insn *jump)
rtx_insn *guard_label = nullptr;
if (TARGET_STREAMING_COMPATIBLE)
guard_label = aarch64_guard_switch_pstate_sm (IP0_REGNUM,
- AARCH64_FL_SM_OFF);
- aarch64_switch_pstate_sm (AARCH64_FL_SM_ON, AARCH64_FL_SM_OFF);
+ AARCH64_ISA_MODE_SM_OFF);
+ aarch64_switch_pstate_sm (AARCH64_ISA_MODE_SM_ON, AARCH64_ISA_MODE_SM_OFF);
if (guard_label)
emit_label (guard_label);
auto seq = get_insns ();
@@ -30160,7 +30164,7 @@ public:
bool
pass_switch_pstate_sm::gate (function *fn)
{
- return (aarch64_fndecl_pstate_sm (fn->decl) != AARCH64_FL_SM_OFF
+ return (aarch64_fndecl_pstate_sm (fn->decl) != AARCH64_ISA_MODE_SM_OFF
|| cfun->machine->call_switches_pstate_sm);
}