@@ -3727,6 +3727,9 @@ ix86_option_override_internal (bool main_args_p,
if (TARGET_X32 && (opts->x_ix86_isa_flags & OPTION_MASK_ISA_MPX))
error ("Intel MPX does not support x32");
+ if (TARGET_X32 && (ix86_isa_flags & OPTION_MASK_ISA_MPX))
+ error ("Intel MPX does not support x32");
+
if (!strcmp (opts->x_ix86_arch_string, "generic"))
error ("generic CPU can be used only for %stune=%s %s",
prefix, suffix, sw);
@@ -6216,10 +6219,15 @@ init_cumulative_args (CUMULATIVE_ARGS *cum, /* Argument info to initialize */
FIXME: once typesytem is fixed, we won't need this code anymore. */
if (i && i->local && i->can_change_signature)
fntype = TREE_TYPE (fndecl);
+ cum->stdarg = stdarg_p (fntype);
cum->maybe_vaarg = (fntype
? (!prototype_p (fntype) || stdarg_p (fntype))
: !libname);
+ cum->bnd_regno = FIRST_BND_REG;
+ cum->bnds_in_bt = 0;
+ cum->force_bnd_pass = 0;
+
if (!TARGET_64BIT)
{
/* If there are variable arguments, then we won't pass anything
@@ -7154,13 +7162,17 @@ construct_container (enum machine_mode mode, enum machine_mode orig_mode,
/* Update the data in CUM to advance over an argument of mode MODE
and data type TYPE. (TYPE is null for libcalls where that information
- may not be available.) */
+ may not be available.)
-static void
+ Return a number of integer registers advanced over. */
+
+static int
function_arg_advance_32 (CUMULATIVE_ARGS *cum, enum machine_mode mode,
const_tree type, HOST_WIDE_INT bytes,
HOST_WIDE_INT words)
{
+ int res = 0;
+
switch (mode)
{
default:
@@ -7178,7 +7190,8 @@ function_arg_advance_32 (CUMULATIVE_ARGS *cum, enum machine_mode mode,
cum->words += words;
cum->nregs -= words;
cum->regno += words;
-
+ if (cum->nregs >= 0)
+ res = words;
if (cum->nregs <= 0)
{
cum->nregs = 0;
@@ -7249,9 +7262,11 @@ function_arg_advance_32 (CUMULATIVE_ARGS *cum, enum machine_mode mode,
}
break;
}
+
+ return res;
}
-static void
+static int
function_arg_advance_64 (CUMULATIVE_ARGS *cum, enum machine_mode mode,
const_tree type, HOST_WIDE_INT words, bool named)
{
@@ -7260,7 +7275,7 @@ function_arg_advance_64 (CUMULATIVE_ARGS *cum, enum machine_mode mode,
/* Unnamed 512 and 256bit vector mode parameters are passed on stack. */
if (!named && (VALID_AVX512F_REG_MODE (mode)
|| VALID_AVX256_REG_MODE (mode)))
- return;
+ return 0;
if (!examine_argument (mode, type, 0, &int_nregs, &sse_nregs)
&& sse_nregs <= cum->sse_nregs && int_nregs <= cum->nregs)
@@ -7269,16 +7284,18 @@ function_arg_advance_64 (CUMULATIVE_ARGS *cum, enum machine_mode mode,
cum->sse_nregs -= sse_nregs;
cum->regno += int_nregs;
cum->sse_regno += sse_nregs;
+ return int_nregs;
}
else
{
int align = ix86_function_arg_boundary (mode, type) / BITS_PER_WORD;
cum->words = (cum->words + align - 1) & ~(align - 1);
cum->words += words;
+ return 0;
}
}
-static void
+static int
function_arg_advance_ms_64 (CUMULATIVE_ARGS *cum, HOST_WIDE_INT bytes,
HOST_WIDE_INT words)
{
@@ -7290,7 +7307,9 @@ function_arg_advance_ms_64 (CUMULATIVE_ARGS *cum, HOST_WIDE_INT bytes,
{
cum->nregs -= 1;
cum->regno += 1;
+ return 1;
}
+ return 0;
}
/* Update the data in CUM to advance over an argument of mode MODE and
@@ -7303,6 +7322,7 @@ ix86_function_arg_advance (cumulative_args_t cum_v, enum machine_mode mode,
{
CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
HOST_WIDE_INT bytes, words;
+ int nregs;
if (mode == BLKmode)
bytes = int_size_in_bytes (type);
@@ -7313,12 +7333,51 @@ ix86_function_arg_advance (cumulative_args_t cum_v, enum machine_mode mode,
if (type)
mode = type_natural_mode (type, NULL, false);
+ if ((type && POINTER_BOUNDS_TYPE_P (type))
+ || POINTER_BOUNDS_MODE_P (mode))
+ {
+ /* If we pass bounds in BT then just update remaining bounds count. */
+ if (cum->bnds_in_bt)
+ {
+ cum->bnds_in_bt--;
+ return;
+ }
+
+ /* Update remaining number of bounds to force. */
+ if (cum->force_bnd_pass)
+ cum->force_bnd_pass--;
+
+ cum->bnd_regno++;
+
+ return;
+ }
+
+ /* The first arg not going to Bounds Tables resets this counter. */
+ cum->bnds_in_bt = 0;
+ /* For unnamed args we always pass bounds to avoid bounds mess when
+ passed and received types do not match. If bounds do not follow
+ unnamed arg, still pretend required number of bounds were passed. */
+ if (cum->force_bnd_pass)
+ {
+ cum->bnd_regno += cum->force_bnd_pass;
+ cum->force_bnd_pass = 0;
+ }
+
if (TARGET_64BIT && (cum ? cum->call_abi : ix86_abi) == MS_ABI)
- function_arg_advance_ms_64 (cum, bytes, words);
+ nregs = function_arg_advance_ms_64 (cum, bytes, words);
else if (TARGET_64BIT)
- function_arg_advance_64 (cum, mode, type, words, named);
+ nregs = function_arg_advance_64 (cum, mode, type, words, named);
else
- function_arg_advance_32 (cum, mode, type, bytes, words);
+ nregs = function_arg_advance_32 (cum, mode, type, bytes, words);
+
+ /* For stdarg we expect bounds to be passed for each value passed
+ in register. */
+ if (cum->stdarg)
+ cum->force_bnd_pass = nregs;
+ /* For pointers passed in memory we expect bounds passed in Bounds
+ Table. */
+ if (!nregs)
+ cum->bnds_in_bt = chkp_type_bounds_count (type);
}
/* Define where to put the arguments to a function.
@@ -7553,6 +7612,23 @@ ix86_function_arg (cumulative_args_t cum_v, enum machine_mode omode,
HOST_WIDE_INT bytes, words;
rtx arg;
+ /* All pointer bounds arguments are handled separately here. */
+ if ((type && POINTER_BOUNDS_TYPE_P (type))
+ || POINTER_BOUNDS_MODE_P (mode))
+ {
+ /* Return NULL if bounds are forced to go in Bounds Table. */
+ if (cum->bnds_in_bt)
+ arg = NULL;
+ /* Return the next available bound reg if any. */
+ else if (cum->bnd_regno <= LAST_BND_REG)
+ arg = gen_rtx_REG (BNDmode, cum->bnd_regno);
+ /* Return the next special slot number otherwise. */
+ else
+ arg = GEN_INT (cum->bnd_regno - LAST_BND_REG - 1);
+
+ return arg;
+ }
+
if (mode == BLKmode)
bytes = int_size_in_bytes (type);
else
@@ -7826,6 +7902,9 @@ ix86_function_value_regno_p (const unsigned int regno)
case SI_REG:
return TARGET_64BIT && ix86_abi != MS_ABI;
+ case FIRST_BND_REG:
+ return chkp_function_instrumented_p (current_function_decl);
+
/* Complex values are returned in %st(0)/%st(1) pair. */
case ST0_REG:
case ST1_REG:
@@ -8002,7 +8081,10 @@ ix86_function_value_1 (const_tree valtype, const_tree fntype_or_decl,
fn = fntype_or_decl;
fntype = fn ? TREE_TYPE (fn) : fntype_or_decl;
- if (TARGET_64BIT && ix86_function_type_abi (fntype) == MS_ABI)
+ if ((valtype && POINTER_BOUNDS_TYPE_P (valtype))
+ || POINTER_BOUNDS_MODE_P (mode))
+ return gen_rtx_REG (BNDmode, FIRST_BND_REG);
+ else if (TARGET_64BIT && ix86_function_type_abi (fntype) == MS_ABI)
return function_value_ms_64 (orig_mode, mode, valtype);
else if (TARGET_64BIT)
return function_value_64 (orig_mode, mode, valtype);
@@ -8111,6 +8193,9 @@ ix86_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
const enum machine_mode mode = type_natural_mode (type, NULL, true);
HOST_WIDE_INT size;
+ if (POINTER_BOUNDS_TYPE_P (type))
+ return false;
+
if (TARGET_64BIT)
{
if (ix86_function_type_abi (fntype) == MS_ABI)
@@ -15411,7 +15496,7 @@ ix86_print_operand (FILE *file, rtx x, int code)
return;
case '!':
- if (ix86_bnd_prefixed_insn_p (NULL_RTX))
+ if (ix86_bnd_prefixed_insn_p (current_output_insn))
fputs ("bnd ", file);
return;
@@ -25009,8 +25094,21 @@ ix86_expand_call (rtx retval, rtx fnaddr, rtx callarg1,
}
call = gen_rtx_CALL (VOIDmode, fnaddr, callarg1);
+
if (retval)
- call = gen_rtx_SET (VOIDmode, retval, call);
+ {
+ /* We should add bounds as destination register in case
+ pointer with bounds may be returned. */
+ if (TARGET_MPX && SCALAR_INT_MODE_P (GET_MODE (retval)))
+ {
+ rtx b0 = gen_rtx_REG (BND64mode, FIRST_BND_REG);
+ rtx b1 = gen_rtx_REG (BND64mode, FIRST_BND_REG + 1);
+ retval = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (3, retval, b0, b1));
+ chkp_put_regs_to_expr_list (retval);
+ }
+
+ call = gen_rtx_SET (VOIDmode, retval, call);
+ }
vec[vec_len++] = call;
if (pop)
@@ -46233,9 +46331,18 @@ ix86_expand_sse2_mulvxdi3 (rtx op0, rtx op1, rtx op2)
bnd by default for current function. */
bool
-ix86_bnd_prefixed_insn_p (rtx insn ATTRIBUTE_UNUSED)
+ix86_bnd_prefixed_insn_p (rtx insn)
{
- return false;
+ /* For call insns check special flag. */
+ if (insn && CALL_P (insn))
+ {
+ rtx call = get_call_rtx_from (insn);
+ if (call)
+ return CALL_EXPR_WITH_BOUNDS_P (call);
+ }
+
+ /* All other insns are prefixed only if function is instrumented. */
+ return chkp_function_instrumented_p (current_function_decl);
}
/* Calculate integer abs() using only SSE2 instructions. */
@@ -1661,6 +1661,10 @@ typedef struct ix86_args {
int float_in_sse; /* Set to 1 or 2 for 32bit targets if
SFmode/DFmode arguments should be passed
in SSE registers. Otherwise 0. */
+ int bnd_regno; /* next available bnd register number */
+ int bnds_in_bt; /* number of bounds expected in BT. */
+ int force_bnd_pass; /* number of bounds expected for stdarg arg. */
+ int stdarg; /* Set to 1 if function is stdarg. */
enum calling_abi call_abi; /* Set to SYSV_ABI for sysv abi. Otherwise
MS_ABI for ms abi. */
} CUMULATIVE_ARGS;