@@ -862,6 +862,7 @@ static const attribute_spec aarch64_gnu_attributes[] =
affects_type_identity, handler, exclude } */
{ "aarch64_vector_pcs", 0, 0, false, true, true, true,
handle_aarch64_vector_pcs_attribute, NULL },
+ { "indirect_return", 0, 0, false, true, true, false, NULL, NULL },
{ "arm_sve_vector_bits", 1, 1, false, true, false, true,
aarch64_sve::handle_arm_sve_vector_bits_attribute,
NULL },
@@ -6458,6 +6459,14 @@ aarch64_function_ok_for_sibcall (tree, tree exp)
if (bool (aarch64_cfun_shared_flags (state))
!= bool (aarch64_fntype_shared_flags (fntype, state)))
return false;
+
+  /* A BTI J is needed where an indirect_return function may return,
+     if BTI is enabled there.  */
+  if (lookup_attribute ("indirect_return", TYPE_ATTRIBUTES (fntype))
+      && !lookup_attribute ("indirect_return",
+			    TYPE_ATTRIBUTES (TREE_TYPE (cfun->decl))))
+    return false;
+
return true;
}
@@ -29246,6 +29255,8 @@ aarch64_comp_type_attributes (const_tree type1, const_tree type2)
if (!check_attr ("gnu", "aarch64_vector_pcs"))
return 0;
+  if (!check_attr ("gnu", "indirect_return"))
+    return 0;
if (!check_attr ("gnu", "Advanced SIMD type"))
return 0;
if (!check_attr ("gnu", "SVE type"))
@@ -92,6 +92,35 @@ const pass_data pass_data_insert_bti =
0, /* todo_flags_finish. */
};
+/* Decide if BTI J is needed after a call instruction.  */
+static bool
+call_needs_bti_j (rtx_insn *insn)
+{
+  /* A returns-twice call (e.g. setjmp) may return via an indirect jump.  */
+  if (find_reg_note (insn, REG_SETJMP, NULL))
+    return true;
+
+  /* A tail call does not return here, so no BTI J is needed after it.  */
+  if (SIBLING_CALL_P (insn))
+    return false;
+
+  /* Check whether the called function is marked to return indirectly.  */
+  rtx call = get_call_rtx_from (insn);
+  rtx fnaddr = XEXP (call, 0);
+  tree fndecl = NULL_TREE;
+  if (GET_CODE (XEXP (fnaddr, 0)) == SYMBOL_REF)
+    fndecl = SYMBOL_REF_DECL (XEXP (fnaddr, 0));
+  if (fndecl == NULL_TREE)
+    fndecl = MEM_EXPR (fnaddr);
+  if (!fndecl)
+    return false;
+  if (TREE_CODE (TREE_TYPE (fndecl)) != FUNCTION_TYPE
+      && TREE_CODE (TREE_TYPE (fndecl)) != METHOD_TYPE)
+    return false;
+  tree fntype = TREE_TYPE (fndecl);
+  return lookup_attribute ("indirect_return", TYPE_ATTRIBUTES (fntype));
+}
+
/* Insert the BTI instruction. */
/* This is implemented as a late RTL pass that runs before branch
shortening and does the following. */
@@ -147,10 +176,9 @@ rest_of_insert_bti (void)
}
}
- /* Also look for calls to setjmp () which would be marked with
- REG_SETJMP note and put a BTI J after. This is where longjump ()
- will return. */
- if (CALL_P (insn) && (find_reg_note (insn, REG_SETJMP, NULL)))
+      /* Also look for calls that may return indirectly, such as setjmp,
+	 and put a BTI J after them.  */
+      if (CALL_P (insn) && call_needs_bti_j (insn))
{
bti_insn = aarch_gen_bti_j ();
emit_insn_after (bti_insn, insn);
From: Szabolcs Nagy <szabolcs.nagy@arm.com>

Tail calls of indirect_return functions from non-indirect_return
functions are disallowed even if BTI is disabled, since the call site
may have BTI enabled.

Following x86, a mismatched attribute on function pointers is not a
type error, even though this can lead to bugs.

Needed for swapcontext within the same function when GCS is enabled.

gcc/ChangeLog:

	* config/aarch64/aarch64.cc (aarch64_gnu_attributes): Add
	indirect_return.
	(aarch64_function_ok_for_sibcall): Disallow tail calls if caller
	is non-indirect_return but callee is indirect_return.
	(aarch64_comp_type_attributes): Check indirect_return attribute.
	* config/arm/aarch-bti-insert.cc (call_needs_bti_j): New.
	(rest_of_insert_bti): Use call_needs_bti_j.
---
 gcc/config/aarch64/aarch64.cc      | 11 +++++++++
 gcc/config/arm/aarch-bti-insert.cc | 36 ++++++++++++++++++++++++++++++++----
 2 files changed, 43 insertions(+), 4 deletions(-)
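
As a usage note for reviewers, here is a minimal C sketch (illustrative
only, not part of the patch) of how the attribute is meant to be used.
The name my_swapcontext is made up, standing in for a returns-twice or
GCS-switching routine, and the comments assume compilation with
-mbranch-protection=standard (or bti):

  /* Returns may arrive via an indirect jump (e.g. a GCS-aware
     swapcontext), so call sites need a BTI J landing pad.  */
  extern int my_swapcontext (void) __attribute__ ((indirect_return));

  int
  use (void)
  {
    /* The insert-bti pass now emits "bti j" after this call.  */
    if (my_swapcontext ())
      return 1;
    return 0;
  }

  int
  caller (void)
  {
    /* Not emitted as a sibcall: caller() lacks indirect_return, so
       aarch64_function_ok_for_sibcall returns false and a normal
       call (followed by a BTI J) is used instead.  */
    return my_swapcontext ();
  }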