@@ -297,7 +297,7 @@ register_scoped_attribute (const struct attribute_spec *attr,
/* Return the spec for the scoped attribute with namespace NS and
name NAME. */
-const struct attribute_spec *
+static const struct attribute_spec *
lookup_scoped_attribute_spec (const_tree ns, const_tree name)
{
struct substring attr;
@@ -334,7 +334,23 @@ lookup_attribute_spec (const_tree name)
return lookup_scoped_attribute_spec (ns, name);
}
-
+
+/* Return the namespace of the attribute ATTR. This accessor works on
+ GNU and C++11 (scoped) attributes. On GNU attributes,
+ it returns an identifier tree for the string "gnu".
+
+ Please read the comments of cxx11_attribute_p to understand the
+ format of attributes. */
+
+static tree
+get_attribute_namespace (const_tree attr)
+{
+ if (cxx11_attribute_p (attr))
+ return TREE_PURPOSE (TREE_PURPOSE (attr));
+ return get_identifier ("gnu");
+}
+
+
/* Process the attributes listed in ATTRIBUTES and install them in *NODE,
which is either a DECL (including a TYPE_DECL) or a TYPE. If a DECL,
it should be modified in place; if a TYPE, a copy should be created
@@ -659,21 +675,6 @@ get_attribute_name (const_tree attr)
return TREE_PURPOSE (attr);
}
-/* Return the namespace of the attribute ATTR. This accessor works on
- GNU and C++11 (scoped) attributes. On GNU attributes,
- it returns an identifier tree for the string "gnu".
-
- Please read the comments of cxx11_attribute_p to understand the
- format of attributes. */
-
-tree
-get_attribute_namespace (const_tree attr)
-{
- if (cxx11_attribute_p (attr))
- return TREE_PURPOSE (TREE_PURPOSE (attr));
- return get_identifier ("gnu");
-}
-
/* Subroutine of set_method_tm_attributes. Apply TM attribute ATTR
to the method FNDECL. */
@@ -1050,6 +1050,63 @@ expand_builtin_longjmp (rtx buf_addr, rtx value)
}
}
+static inline bool
+more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
+{
+ return (iter->i < iter->n);
+}
+
+/* This function validates the types of a function call argument list
+ against a specified list of tree_codes. If the last specifier is a 0,
+ that represents an ellipses, otherwise the last specifier must be a
+ VOID_TYPE. */
+
+static bool
+validate_arglist (const_tree callexpr, ...)
+{
+ enum tree_code code;
+ bool res = 0;
+ va_list ap;
+ const_call_expr_arg_iterator iter;
+ const_tree arg;
+
+ va_start (ap, callexpr);
+ init_const_call_expr_arg_iterator (callexpr, &iter);
+
+ do
+ {
+ code = (enum tree_code) va_arg (ap, int);
+ switch (code)
+ {
+ case 0:
+ /* This signifies an ellipses, any further arguments are all ok. */
+ res = true;
+ goto end;
+ case VOID_TYPE:
+ /* This signifies an endlink, if no arguments remain, return
+ true, otherwise return false. */
+ res = !more_const_call_expr_args_p (&iter);
+ goto end;
+ default:
+ /* If no parameters remain or the parameter's code does not
+ match the specified code, return false. Otherwise continue
+ checking any remaining arguments. */
+ arg = next_const_call_expr_arg (&iter);
+ if (!validate_arg (arg, code))
+ goto end;
+ break;
+ }
+ }
+ while (1);
+
+ /* We need gotos here since we can only have one VA_CLOSE in a
+ function. */
+ end: ;
+ va_end (ap);
+
+ return res;
+}
+
/* Expand a call to __builtin_nonlocal_goto. We're passed the target label
and the address of the save area. */
@@ -5581,6 +5638,33 @@ expand_builtin_set_thread_pointer (tree exp)
}
+/* Emit code to restore the current value of stack. */
+
+static void
+expand_stack_restore (tree var)
+{
+ rtx prev, sa = expand_normal (var);
+
+ sa = convert_memory_address (Pmode, sa);
+
+ prev = get_last_insn ();
+ emit_stack_restore (SAVE_BLOCK, sa);
+ fixup_args_size_notes (prev, get_last_insn (), 0);
+}
+
+
+/* Emit code to save the current value of stack. */
+
+static rtx
+expand_stack_save (void)
+{
+ rtx ret = NULL_RTX;
+
+ do_pending_stack_adjust ();
+ emit_stack_save (SAVE_BLOCK, &ret);
+ return ret;
+}
+
/* Expand an expression EXP that calls a built-in function,
with result going to TARGET if that's convenient
(and in mode MODE if that's convenient).
@@ -10962,47 +11046,51 @@ fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool ignore)
return NULL_TREE;
}
-/* Builtins with folding operations that operate on "..." arguments
- need special handling; we need to store the arguments in a convenient
- data structure before attempting any folding. Fortunately there are
- only a few builtins that fall into this category. FNDECL is the
- function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
- result of the function call is ignored. */
+/* Construct a new CALL_EXPR to FNDECL using the tail of the argument
+ list ARGS along with N new arguments in NEWARGS. SKIP is the number
+ of arguments in ARGS to be omitted. OLDNARGS is the number of
+ elements in ARGS. */
static tree
-fold_builtin_varargs (location_t loc, tree fndecl, tree exp,
- bool ignore ATTRIBUTE_UNUSED)
+rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
+ int skip, tree fndecl, int n, va_list newargs)
{
- enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
- tree ret = NULL_TREE;
+ int nargs = oldnargs - skip + n;
+ tree *buffer;
- switch (fcode)
+ if (n > 0)
{
- case BUILT_IN_SPRINTF_CHK:
- case BUILT_IN_VSPRINTF_CHK:
- ret = fold_builtin_sprintf_chk (loc, exp, fcode);
- break;
+ int i, j;
- case BUILT_IN_SNPRINTF_CHK:
- case BUILT_IN_VSNPRINTF_CHK:
- ret = fold_builtin_snprintf_chk (loc, exp, NULL_TREE, fcode);
- break;
+ buffer = XALLOCAVEC (tree, nargs);
+ for (i = 0; i < n; i++)
+ buffer[i] = va_arg (newargs, tree);
+ for (j = skip; j < oldnargs; j++, i++)
+ buffer[i] = args[j];
+ }
+ else
+ buffer = args + skip;
- case BUILT_IN_FPCLASSIFY:
- ret = fold_builtin_fpclassify (loc, exp);
- break;
+ return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
+}
- default:
- break;
- }
- if (ret)
- {
- ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
- SET_EXPR_LOCATION (ret, loc);
- TREE_NO_WARNING (ret) = 1;
- return ret;
- }
- return NULL_TREE;
+/* Construct a new CALL_EXPR to FNDECL using the tail of the argument
+ list ARGS along with N new arguments specified as the "..."
+ parameters. SKIP is the number of arguments in ARGS to be omitted.
+ OLDNARGS is the number of elements in ARGS. */
+
+static tree
+rewrite_call_expr_array (location_t loc, int oldnargs, tree *args,
+ int skip, tree fndecl, int n, ...)
+{
+ va_list ap;
+ tree t;
+
+ va_start (ap, n);
+ t = rewrite_call_expr_valist (loc, oldnargs, args, skip, fndecl, n, ap);
+ va_end (ap);
+
+ return t;
}
/* Return true if FNDECL shouldn't be folded right now.
@@ -11193,53 +11281,6 @@ fold_builtin_call_array (location_t loc, tree type,
return build_call_array_loc (loc, type, fn, n, argarray);
}
-/* Construct a new CALL_EXPR to FNDECL using the tail of the argument
- list ARGS along with N new arguments in NEWARGS. SKIP is the number
- of arguments in ARGS to be omitted. OLDNARGS is the number of
- elements in ARGS. */
-
-static tree
-rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
- int skip, tree fndecl, int n, va_list newargs)
-{
- int nargs = oldnargs - skip + n;
- tree *buffer;
-
- if (n > 0)
- {
- int i, j;
-
- buffer = XALLOCAVEC (tree, nargs);
- for (i = 0; i < n; i++)
- buffer[i] = va_arg (newargs, tree);
- for (j = skip; j < oldnargs; j++, i++)
- buffer[i] = args[j];
- }
- else
- buffer = args + skip;
-
- return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
-}
-
-/* Construct a new CALL_EXPR to FNDECL using the tail of the argument
- list ARGS along with N new arguments specified as the "..."
- parameters. SKIP is the number of arguments in ARGS to be omitted.
- OLDNARGS is the number of elements in ARGS. */
-
-static tree
-rewrite_call_expr_array (location_t loc, int oldnargs, tree *args,
- int skip, tree fndecl, int n, ...)
-{
- va_list ap;
- tree t;
-
- va_start (ap, n);
- t = rewrite_call_expr_valist (loc, oldnargs, args, skip, fndecl, n, ap);
- va_end (ap);
-
- return t;
-}
-
/* Construct a new CALL_EXPR using the tail of the argument list of EXP
along with N new arguments specified as the "..." parameters. SKIP
is the number of arguments in EXP to be omitted. This function is used
@@ -11329,57 +11370,6 @@ validate_gimple_arglist (const_gimple call, ...)
return res;
}
-/* This function validates the types of a function call argument list
- against a specified list of tree_codes. If the last specifier is a 0,
- that represents an ellipses, otherwise the last specifier must be a
- VOID_TYPE. */
-
-bool
-validate_arglist (const_tree callexpr, ...)
-{
- enum tree_code code;
- bool res = 0;
- va_list ap;
- const_call_expr_arg_iterator iter;
- const_tree arg;
-
- va_start (ap, callexpr);
- init_const_call_expr_arg_iterator (callexpr, &iter);
-
- do
- {
- code = (enum tree_code) va_arg (ap, int);
- switch (code)
- {
- case 0:
- /* This signifies an ellipses, any further arguments are all ok. */
- res = true;
- goto end;
- case VOID_TYPE:
- /* This signifies an endlink, if no arguments remain, return
- true, otherwise return false. */
- res = !more_const_call_expr_args_p (&iter);
- goto end;
- default:
- /* If no parameters remain or the parameter's code does not
- match the specified code, return false. Otherwise continue
- checking any remaining arguments. */
- arg = next_const_call_expr_arg (&iter);
- if (!validate_arg (arg, code))
- goto end;
- break;
- }
- }
- while (1);
-
- /* We need gotos here since we can only have one VA_CLOSE in a
- function. */
- end: ;
- va_end (ap);
-
- return res;
-}
-
/* Default target-specific builtin expander that does nothing. */
rtx
@@ -13151,7 +13141,7 @@ fold_builtin_snprintf_chk_1 (location_t loc, int nargs, tree *args,
BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
passed as second argument. */
-tree
+static tree
fold_builtin_snprintf_chk (location_t loc, tree exp, tree maxlen,
enum built_in_function fcode)
{
@@ -13159,6 +13149,49 @@ fold_builtin_snprintf_chk (location_t loc, tree exp, tree maxlen,
CALL_EXPR_ARGP (exp), maxlen, fcode);
}
+/* Builtins with folding operations that operate on "..." arguments
+ need special handling; we need to store the arguments in a convenient
+ data structure before attempting any folding. Fortunately there are
+ only a few builtins that fall into this category. FNDECL is the
+ function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
+ result of the function call is ignored. */
+
+static tree
+fold_builtin_varargs (location_t loc, tree fndecl, tree exp,
+ bool ignore ATTRIBUTE_UNUSED)
+{
+ enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
+ tree ret = NULL_TREE;
+
+ switch (fcode)
+ {
+ case BUILT_IN_SPRINTF_CHK:
+ case BUILT_IN_VSPRINTF_CHK:
+ ret = fold_builtin_sprintf_chk (loc, exp, fcode);
+ break;
+
+ case BUILT_IN_SNPRINTF_CHK:
+ case BUILT_IN_VSNPRINTF_CHK:
+ ret = fold_builtin_snprintf_chk (loc, exp, NULL_TREE, fcode);
+ break;
+
+ case BUILT_IN_FPCLASSIFY:
+ ret = fold_builtin_fpclassify (loc, exp);
+ break;
+
+ default:
+ break;
+ }
+ if (ret)
+ {
+ ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
+ SET_EXPR_LOCATION (ret, loc);
+ TREE_NO_WARNING (ret) = 1;
+ return ret;
+ }
+ return NULL_TREE;
+}
+
/* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
FMT and ARG are the arguments to the call; we don't fold cases with
more than 2 arguments, and ARG may be null if this is a 1-argument case.
@@ -21,8 +21,9 @@ along with GCC; see the file COPYING3. If not see
#include "system.h"
#include "coretypes.h"
#include "tm.h"
-#include "tree.h"
#include "rtl.h"
+#include "hard-reg-set.h"
+#include "tree.h"
#include "tm_p.h"
#include "basic-block.h"
#include "function.h"
@@ -57,6 +58,8 @@ along with GCC; see the file COPYING3. If not see
#include "insn-attr.h" /* For INSN_SCHEDULING. */
#include "asan.h"
#include "tree-ssa-address.h"
+#include "recog.h"
+#include "output.h"
/* This variable holds information helping the rewriting of SSA trees
into RTL. */
@@ -2194,6 +2197,866 @@ expand_call_stmt (gimple stmt)
mark_transaction_restart_calls (stmt);
}
+
+/* Generate RTL for an asm statement (explicit assembler code).
+ STRING is a STRING_CST node containing the assembler code text,
+ or an ADDR_EXPR containing a STRING_CST. VOL nonzero means the
+ insn is volatile; don't optimize it. */
+
+static void
+expand_asm_loc (tree string, int vol, location_t locus)
+{
+ rtx body;
+
+ if (TREE_CODE (string) == ADDR_EXPR)
+ string = TREE_OPERAND (string, 0);
+
+ body = gen_rtx_ASM_INPUT_loc (VOIDmode,
+ ggc_strdup (TREE_STRING_POINTER (string)),
+ locus);
+
+ MEM_VOLATILE_P (body) = vol;
+
+ emit_insn (body);
+}
+
+/* Return the number of times character C occurs in string S. */
+static int
+n_occurrences (int c, const char *s)
+{
+ int n = 0;
+ while (*s)
+ n += (*s++ == c);
+ return n;
+}
+
+/* A subroutine of expand_asm_operands. Check that all operands have
+ the same number of alternatives. Return true if so. */
+
+static bool
+check_operand_nalternatives (tree outputs, tree inputs)
+{
+ if (outputs || inputs)
+ {
+ tree tmp = TREE_PURPOSE (outputs ? outputs : inputs);
+ int nalternatives
+ = n_occurrences (',', TREE_STRING_POINTER (TREE_VALUE (tmp)));
+ tree next = inputs;
+
+ if (nalternatives + 1 > MAX_RECOG_ALTERNATIVES)
+ {
+ error ("too many alternatives in %<asm%>");
+ return false;
+ }
+
+ tmp = outputs;
+ while (tmp)
+ {
+ const char *constraint
+ = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (tmp)));
+
+ if (n_occurrences (',', constraint) != nalternatives)
+ {
+ error ("operand constraints for %<asm%> differ "
+ "in number of alternatives");
+ return false;
+ }
+
+ if (TREE_CHAIN (tmp))
+ tmp = TREE_CHAIN (tmp);
+ else
+ tmp = next, next = 0;
+ }
+ }
+
+ return true;
+}
+
+/* Check for overlap between registers marked in CLOBBERED_REGS and
+ anything inappropriate in T. Emit error and return the register
+ variable definition for error, NULL_TREE for ok. */
+
+static bool
+tree_conflicts_with_clobbers_p (tree t, HARD_REG_SET *clobbered_regs)
+{
+ /* Conflicts between asm-declared register variables and the clobber
+ list are not allowed. */
+ tree overlap = tree_overlaps_hard_reg_set (t, clobbered_regs);
+
+ if (overlap)
+ {
+ error ("asm-specifier for variable %qE conflicts with asm clobber list",
+ DECL_NAME (overlap));
+
+ /* Reset registerness to stop multiple errors emitted for a single
+ variable. */
+ DECL_REGISTER (overlap) = 0;
+ return true;
+ }
+
+ return false;
+}
+
+/* Generate RTL for an asm statement with arguments.
+ STRING is the instruction template.
+ OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
+ Each output or input has an expression in the TREE_VALUE and
+ a tree list in TREE_PURPOSE which in turn contains a constraint
+ name in TREE_VALUE (or NULL_TREE) and a constraint string
+ in TREE_PURPOSE.
+ CLOBBERS is a list of STRING_CST nodes each naming a hard register
+ that is clobbered by this insn.
+
+ LABELS is a list of labels, and if LABELS is non-NULL, FALLTHRU_BB
+ should be the fallthru basic block of the asm goto.
+
+ Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
+ Some elements of OUTPUTS may be replaced with trees representing temporary
+ values. The caller should copy those temporary values to the originally
+ specified lvalues.
+
+ VOL nonzero means the insn is volatile; don't optimize it. */
+
+static void
+expand_asm_operands (tree string, tree outputs, tree inputs,
+ tree clobbers, tree labels, basic_block fallthru_bb,
+ int vol, location_t locus)
+{
+ rtvec argvec, constraintvec, labelvec;
+ rtx body;
+ int ninputs = list_length (inputs);
+ int noutputs = list_length (outputs);
+ int nlabels = list_length (labels);
+ int ninout;
+ int nclobbers;
+ HARD_REG_SET clobbered_regs;
+ int clobber_conflict_found = 0;
+ tree tail;
+ tree t;
+ int i;
+ /* Vector of RTX's of evaluated output operands. */
+ rtx *output_rtx = XALLOCAVEC (rtx, noutputs);
+ int *inout_opnum = XALLOCAVEC (int, noutputs);
+ rtx *real_output_rtx = XALLOCAVEC (rtx, noutputs);
+ enum machine_mode *inout_mode = XALLOCAVEC (enum machine_mode, noutputs);
+ const char **constraints = XALLOCAVEC (const char *, noutputs + ninputs);
+ int old_generating_concat_p = generating_concat_p;
+ rtx fallthru_label = NULL_RTX;
+
+ /* An ASM with no outputs needs to be treated as volatile, for now. */
+ if (noutputs == 0)
+ vol = 1;
+
+ if (! check_operand_nalternatives (outputs, inputs))
+ return;
+
+ string = resolve_asm_operand_names (string, outputs, inputs, labels);
+
+ /* Collect constraints. */
+ i = 0;
+ for (t = outputs; t ; t = TREE_CHAIN (t), i++)
+ constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
+ for (t = inputs; t ; t = TREE_CHAIN (t), i++)
+ constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
+
+ /* Sometimes we wish to automatically clobber registers across an asm.
+ Case in point is when the i386 backend moved from cc0 to a hard reg --
+ maintaining source-level compatibility means automatically clobbering
+ the flags register. */
+ clobbers = targetm.md_asm_clobbers (outputs, inputs, clobbers);
+
+ /* Count the number of meaningful clobbered registers, ignoring what
+ we would ignore later. */
+ nclobbers = 0;
+ CLEAR_HARD_REG_SET (clobbered_regs);
+ for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
+ {
+ const char *regname;
+ int nregs;
+
+ if (TREE_VALUE (tail) == error_mark_node)
+ return;
+ regname = TREE_STRING_POINTER (TREE_VALUE (tail));
+
+ i = decode_reg_name_and_count (regname, &nregs);
+ if (i == -4)
+ ++nclobbers;
+ else if (i == -2)
+ error ("unknown register name %qs in %<asm%>", regname);
+
+ /* Mark clobbered registers. */
+ if (i >= 0)
+ {
+ int reg;
+
+ for (reg = i; reg < i + nregs; reg++)
+ {
+ ++nclobbers;
+
+ /* Clobbering the PIC register is an error. */
+ if (reg == (int) PIC_OFFSET_TABLE_REGNUM)
+ {
+ error ("PIC register clobbered by %qs in %<asm%>", regname);
+ return;
+ }
+
+ SET_HARD_REG_BIT (clobbered_regs, reg);
+ }
+ }
+ }
+
+ /* First pass over inputs and outputs checks validity and sets
+ mark_addressable if needed. */
+
+ ninout = 0;
+ for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
+ {
+ tree val = TREE_VALUE (tail);
+ tree type = TREE_TYPE (val);
+ const char *constraint;
+ bool is_inout;
+ bool allows_reg;
+ bool allows_mem;
+
+ /* If there's an erroneous arg, emit no insn. */
+ if (type == error_mark_node)
+ return;
+
+ /* Try to parse the output constraint. If that fails, there's
+ no point in going further. */
+ constraint = constraints[i];
+ if (!parse_output_constraint (&constraint, i, ninputs, noutputs,
+ &allows_mem, &allows_reg, &is_inout))
+ return;
+
+ if (! allows_reg
+ && (allows_mem
+ || is_inout
+ || (DECL_P (val)
+ && REG_P (DECL_RTL (val))
+ && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type))))
+ mark_addressable (val);
+
+ if (is_inout)
+ ninout++;
+ }
+
+ ninputs += ninout;
+ if (ninputs + noutputs > MAX_RECOG_OPERANDS)
+ {
+ error ("more than %d operands in %<asm%>", MAX_RECOG_OPERANDS);
+ return;
+ }
+
+ for (i = 0, tail = inputs; tail; i++, tail = TREE_CHAIN (tail))
+ {
+ bool allows_reg, allows_mem;
+ const char *constraint;
+
+ /* If there's an erroneous arg, emit no insn, because the ASM_INPUT
+ would get VOIDmode and that could cause a crash in reload. */
+ if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)
+ return;
+
+ constraint = constraints[i + noutputs];
+ if (! parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
+ constraints, &allows_mem, &allows_reg))
+ return;
+
+ if (! allows_reg && allows_mem)
+ mark_addressable (TREE_VALUE (tail));
+ }
+
+ /* Second pass evaluates arguments. */
+
+ /* Make sure stack is consistent for asm goto. */
+ if (nlabels > 0)
+ do_pending_stack_adjust ();
+
+ ninout = 0;
+ for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
+ {
+ tree val = TREE_VALUE (tail);
+ tree type = TREE_TYPE (val);
+ bool is_inout;
+ bool allows_reg;
+ bool allows_mem;
+ rtx op;
+ bool ok;
+
+ ok = parse_output_constraint (&constraints[i], i, ninputs,
+ noutputs, &allows_mem, &allows_reg,
+ &is_inout);
+ gcc_assert (ok);
+
+ /* If an output operand is not a decl or indirect ref and our constraint
+ allows a register, make a temporary to act as an intermediate.
+ Make the asm insn write into that, then our caller will copy it to
+ the real output operand. Likewise for promoted variables. */
+
+ generating_concat_p = 0;
+
+ real_output_rtx[i] = NULL_RTX;
+ if ((TREE_CODE (val) == INDIRECT_REF
+ && allows_mem)
+ || (DECL_P (val)
+ && (allows_mem || REG_P (DECL_RTL (val)))
+ && ! (REG_P (DECL_RTL (val))
+ && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
+ || ! allows_reg
+ || is_inout)
+ {
+ op = expand_expr (val, NULL_RTX, VOIDmode,
+ !allows_reg ? EXPAND_MEMORY : EXPAND_WRITE);
+ if (MEM_P (op))
+ op = validize_mem (op);
+
+ if (! allows_reg && !MEM_P (op))
+ error ("output number %d not directly addressable", i);
+ if ((! allows_mem && MEM_P (op))
+ || GET_CODE (op) == CONCAT)
+ {
+ real_output_rtx[i] = op;
+ op = gen_reg_rtx (GET_MODE (op));
+ if (is_inout)
+ emit_move_insn (op, real_output_rtx[i]);
+ }
+ }
+ else
+ {
+ op = assign_temp (type, 0, 1);
+ op = validize_mem (op);
+ if (!MEM_P (op) && TREE_CODE (TREE_VALUE (tail)) == SSA_NAME)
+ set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (TREE_VALUE (tail)), op);
+ TREE_VALUE (tail) = make_tree (type, op);
+ }
+ output_rtx[i] = op;
+
+ generating_concat_p = old_generating_concat_p;
+
+ if (is_inout)
+ {
+ inout_mode[ninout] = TYPE_MODE (type);
+ inout_opnum[ninout++] = i;
+ }
+
+ if (tree_conflicts_with_clobbers_p (val, &clobbered_regs))
+ clobber_conflict_found = 1;
+ }
+
+ /* Make vectors for the expression-rtx, constraint strings,
+ and named operands. */
+
+ argvec = rtvec_alloc (ninputs);
+ constraintvec = rtvec_alloc (ninputs);
+ labelvec = rtvec_alloc (nlabels);
+
+ body = gen_rtx_ASM_OPERANDS ((noutputs == 0 ? VOIDmode
+ : GET_MODE (output_rtx[0])),
+ ggc_strdup (TREE_STRING_POINTER (string)),
+ empty_string, 0, argvec, constraintvec,
+ labelvec, locus);
+
+ MEM_VOLATILE_P (body) = vol;
+
+ /* Eval the inputs and put them into ARGVEC.
+ Put their constraints into ASM_INPUTs and store in CONSTRAINTS. */
+
+ for (i = 0, tail = inputs; tail; tail = TREE_CHAIN (tail), ++i)
+ {
+ bool allows_reg, allows_mem;
+ const char *constraint;
+ tree val, type;
+ rtx op;
+ bool ok;
+
+ constraint = constraints[i + noutputs];
+ ok = parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
+ constraints, &allows_mem, &allows_reg);
+ gcc_assert (ok);
+
+ generating_concat_p = 0;
+
+ val = TREE_VALUE (tail);
+ type = TREE_TYPE (val);
+ /* EXPAND_INITIALIZER will not generate code for valid initializer
+ constants, but will still generate code for other types of operand.
+ This is the behavior we want for constant constraints. */
+ op = expand_expr (val, NULL_RTX, VOIDmode,
+ allows_reg ? EXPAND_NORMAL
+ : allows_mem ? EXPAND_MEMORY
+ : EXPAND_INITIALIZER);
+
+ /* Never pass a CONCAT to an ASM. */
+ if (GET_CODE (op) == CONCAT)
+ op = force_reg (GET_MODE (op), op);
+ else if (MEM_P (op))
+ op = validize_mem (op);
+
+ if (asm_operand_ok (op, constraint, NULL) <= 0)
+ {
+ if (allows_reg && TYPE_MODE (type) != BLKmode)
+ op = force_reg (TYPE_MODE (type), op);
+ else if (!allows_mem)
+ warning (0, "asm operand %d probably doesn%'t match constraints",
+ i + noutputs);
+ else if (MEM_P (op))
+ {
+ /* We won't recognize either volatile memory or memory
+ with a queued address as available a memory_operand
+ at this point. Ignore it: clearly this *is* a memory. */
+ }
+ else
+ gcc_unreachable ();
+ }
+
+ generating_concat_p = old_generating_concat_p;
+ ASM_OPERANDS_INPUT (body, i) = op;
+
+ ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, i)
+ = gen_rtx_ASM_INPUT (TYPE_MODE (type),
+ ggc_strdup (constraints[i + noutputs]));
+
+ if (tree_conflicts_with_clobbers_p (val, &clobbered_regs))
+ clobber_conflict_found = 1;
+ }
+
+ /* Protect all the operands from the queue now that they have all been
+ evaluated. */
+
+ generating_concat_p = 0;
+
+ /* For in-out operands, copy output rtx to input rtx. */
+ for (i = 0; i < ninout; i++)
+ {
+ int j = inout_opnum[i];
+ char buffer[16];
+
+ ASM_OPERANDS_INPUT (body, ninputs - ninout + i)
+ = output_rtx[j];
+
+ sprintf (buffer, "%d", j);
+ ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, ninputs - ninout + i)
+ = gen_rtx_ASM_INPUT (inout_mode[i], ggc_strdup (buffer));
+ }
+
+ /* Copy labels to the vector. */
+ for (i = 0, tail = labels; i < nlabels; ++i, tail = TREE_CHAIN (tail))
+ {
+ rtx r;
+ /* If asm goto has any labels in the fallthru basic block, use
+ a label that we emit immediately after the asm goto. Expansion
+ may insert further instructions into the same basic block after
+ asm goto and if we don't do this, insertion of instructions on
+ the fallthru edge might misbehave. See PR58670. */
+ if (fallthru_bb
+ && label_to_block_fn (cfun, TREE_VALUE (tail)) == fallthru_bb)
+ {
+ if (fallthru_label == NULL_RTX)
+ fallthru_label = gen_label_rtx ();
+ r = fallthru_label;
+ }
+ else
+ r = label_rtx (TREE_VALUE (tail));
+ ASM_OPERANDS_LABEL (body, i) = gen_rtx_LABEL_REF (Pmode, r);
+ }
+
+ generating_concat_p = old_generating_concat_p;
+
+ /* Now, for each output, construct an rtx
+ (set OUTPUT (asm_operands INSN OUTPUTCONSTRAINT OUTPUTNUMBER
+ ARGVEC CONSTRAINTS OPNAMES))
+ If there is more than one, put them inside a PARALLEL. */
+
+ if (nlabels > 0 && nclobbers == 0)
+ {
+ gcc_assert (noutputs == 0);
+ emit_jump_insn (body);
+ }
+ else if (noutputs == 0 && nclobbers == 0)
+ {
+ /* No output operands: put in a raw ASM_OPERANDS rtx. */
+ emit_insn (body);
+ }
+ else if (noutputs == 1 && nclobbers == 0)
+ {
+ ASM_OPERANDS_OUTPUT_CONSTRAINT (body) = ggc_strdup (constraints[0]);
+ emit_insn (gen_rtx_SET (VOIDmode, output_rtx[0], body));
+ }
+ else
+ {
+ rtx obody = body;
+ int num = noutputs;
+
+ if (num == 0)
+ num = 1;
+
+ body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num + nclobbers));
+
+ /* For each output operand, store a SET. */
+ for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
+ {
+ XVECEXP (body, 0, i)
+ = gen_rtx_SET (VOIDmode,
+ output_rtx[i],
+ gen_rtx_ASM_OPERANDS
+ (GET_MODE (output_rtx[i]),
+ ggc_strdup (TREE_STRING_POINTER (string)),
+ ggc_strdup (constraints[i]),
+ i, argvec, constraintvec, labelvec, locus));
+
+ MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;
+ }
+
+ /* If there are no outputs (but there are some clobbers)
+ store the bare ASM_OPERANDS into the PARALLEL. */
+
+ if (i == 0)
+ XVECEXP (body, 0, i++) = obody;
+
+ /* Store (clobber REG) for each clobbered register specified. */
+
+ for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
+ {
+ const char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
+ int reg, nregs;
+ int j = decode_reg_name_and_count (regname, &nregs);
+ rtx clobbered_reg;
+
+ if (j < 0)
+ {
+ if (j == -3) /* `cc', which is not a register */
+ continue;
+
+ if (j == -4) /* `memory', don't cache memory across asm */
+ {
+ XVECEXP (body, 0, i++)
+ = gen_rtx_CLOBBER (VOIDmode,
+ gen_rtx_MEM
+ (BLKmode,
+ gen_rtx_SCRATCH (VOIDmode)));
+ continue;
+ }
+
+ /* Ignore unknown register, error already signaled. */
+ continue;
+ }
+
+ for (reg = j; reg < j + nregs; reg++)
+ {
+ /* Use QImode since that's guaranteed to clobber just
+ * one reg. */
+ clobbered_reg = gen_rtx_REG (QImode, reg);
+
+ /* Do sanity check for overlap between clobbers and
+ respectively input and outputs that hasn't been
+ handled. Such overlap should have been detected and
+ reported above. */
+ if (!clobber_conflict_found)
+ {
+ int opno;
+
+ /* We test the old body (obody) contents to avoid
+ tripping over the under-construction body. */
+ for (opno = 0; opno < noutputs; opno++)
+ if (reg_overlap_mentioned_p (clobbered_reg,
+ output_rtx[opno]))
+ internal_error
+ ("asm clobber conflict with output operand");
+
+ for (opno = 0; opno < ninputs - ninout; opno++)
+ if (reg_overlap_mentioned_p (clobbered_reg,
+ ASM_OPERANDS_INPUT (obody,
+ opno)))
+ internal_error
+ ("asm clobber conflict with input operand");
+ }
+
+ XVECEXP (body, 0, i++)
+ = gen_rtx_CLOBBER (VOIDmode, clobbered_reg);
+ }
+ }
+
+ if (nlabels > 0)
+ emit_jump_insn (body);
+ else
+ emit_insn (body);
+ }
+
+ if (fallthru_label)
+ emit_label (fallthru_label);
+
+ /* For any outputs that needed reloading into registers, spill them
+ back to where they belong. */
+ for (i = 0; i < noutputs; ++i)
+ if (real_output_rtx[i])
+ emit_move_insn (real_output_rtx[i], output_rtx[i]);
+
+ crtl->has_asm_statement = 1;
+ free_temp_slots ();
+}
+
+
+static void
+expand_asm_stmt (gimple stmt)
+{
+ int noutputs;
+ tree outputs, tail, t;
+ tree *o;
+ size_t i, n;
+ const char *s;
+ tree str, out, in, cl, labels;
+ location_t locus = gimple_location (stmt);
+ basic_block fallthru_bb = NULL;
+
+ /* Meh... convert the gimple asm operands into real tree lists.
+ Eventually we should make all routines work on the vectors instead
+ of relying on TREE_CHAIN. */
+ out = NULL_TREE;
+ n = gimple_asm_noutputs (stmt);
+ if (n > 0)
+ {
+ t = out = gimple_asm_output_op (stmt, 0);
+ for (i = 1; i < n; i++)
+ t = TREE_CHAIN (t) = gimple_asm_output_op (stmt, i);
+ }
+
+ in = NULL_TREE;
+ n = gimple_asm_ninputs (stmt);
+ if (n > 0)
+ {
+ t = in = gimple_asm_input_op (stmt, 0);
+ for (i = 1; i < n; i++)
+ t = TREE_CHAIN (t) = gimple_asm_input_op (stmt, i);
+ }
+
+ cl = NULL_TREE;
+ n = gimple_asm_nclobbers (stmt);
+ if (n > 0)
+ {
+ t = cl = gimple_asm_clobber_op (stmt, 0);
+ for (i = 1; i < n; i++)
+ t = TREE_CHAIN (t) = gimple_asm_clobber_op (stmt, i);
+ }
+
+ labels = NULL_TREE;
+ n = gimple_asm_nlabels (stmt);
+ if (n > 0)
+ {
+ edge fallthru = find_fallthru_edge (gimple_bb (stmt)->succs);
+ if (fallthru)
+ fallthru_bb = fallthru->dest;
+ t = labels = gimple_asm_label_op (stmt, 0);
+ for (i = 1; i < n; i++)
+ t = TREE_CHAIN (t) = gimple_asm_label_op (stmt, i);
+ }
+
+ s = gimple_asm_string (stmt);
+ str = build_string (strlen (s), s);
+
+ if (gimple_asm_input_p (stmt))
+ {
+ expand_asm_loc (str, gimple_asm_volatile_p (stmt), locus);
+ return;
+ }
+
+ outputs = out;
+ noutputs = gimple_asm_noutputs (stmt);
+ /* o[I] is the place that output number I should be written. */
+ o = (tree *) alloca (noutputs * sizeof (tree));
+
+ /* Record the contents of OUTPUTS before it is modified. */
+ for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
+ o[i] = TREE_VALUE (tail);
+
+ /* Generate the ASM_OPERANDS insn; store into the TREE_VALUEs of
+ OUTPUTS some trees for where the values were actually stored. */
+ expand_asm_operands (str, outputs, in, cl, labels, fallthru_bb,
+ gimple_asm_volatile_p (stmt), locus);
+
+ /* Copy all the intermediate outputs into the specified outputs. */
+ for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
+ {
+ if (o[i] != TREE_VALUE (tail))
+ {
+ expand_assignment (o[i], TREE_VALUE (tail), false);
+ free_temp_slots ();
+
+ /* Restore the original value so that it's correct the next
+ time we expand this function. */
+ TREE_VALUE (tail) = o[i];
+ }
+ }
+}
+
+/* Emit code to jump to the address
+ specified by the pointer expression EXP. */
+
+static void
+expand_computed_goto (tree exp)
+{
+ rtx x = expand_normal (exp);
+
+ x = convert_memory_address (Pmode, x);
+
+ do_pending_stack_adjust ();
+ emit_indirect_jump (x);
+}
+
+/* Generate RTL code for a `goto' statement with target label LABEL.
+ LABEL should be a LABEL_DECL tree node that was or will later be
+ defined with `expand_label'. */
+
+static void
+expand_goto (tree label)
+{
+#ifdef ENABLE_CHECKING
+ /* Check for a nonlocal goto to a containing function. Should have
+ gotten translated to __builtin_nonlocal_goto. */
+ tree context = decl_function_context (label);
+ gcc_assert (!context || context == current_function_decl);
+#endif
+
+ emit_jump (label_rtx (label));
+}
+
+/* Output a return with no value. */
+
+static void
+expand_null_return_1 (void)
+{
+ clear_pending_stack_adjust ();
+ do_pending_stack_adjust ();
+ emit_jump (return_label);
+}
+
+/* Generate RTL to return from the current function, with no value.
+ (That is, we do not do anything about returning any value.) */
+
+void
+expand_null_return (void)
+{
+ /* If this function was declared to return a value, but we
+ didn't, clobber the return registers so that they are not
+ propagated live to the rest of the function. */
+ clobber_return_register ();
+
+ expand_null_return_1 ();
+}
+
+/* Generate RTL to return from the current function, with value VAL. */
+
+static void
+expand_value_return (rtx val)
+{
+ /* Copy the value to the return location unless it's already there. */
+
+ tree decl = DECL_RESULT (current_function_decl);
+ rtx return_reg = DECL_RTL (decl);
+ if (return_reg != val)
+ {
+ tree funtype = TREE_TYPE (current_function_decl);
+ tree type = TREE_TYPE (decl);
+ int unsignedp = TYPE_UNSIGNED (type);
+ enum machine_mode old_mode = DECL_MODE (decl);
+ enum machine_mode mode;
+ if (DECL_BY_REFERENCE (decl))
+ mode = promote_function_mode (type, old_mode, &unsignedp, funtype, 2);
+ else
+ mode = promote_function_mode (type, old_mode, &unsignedp, funtype, 1);
+
+ if (mode != old_mode)
+ val = convert_modes (mode, old_mode, val, unsignedp);
+
+ if (GET_CODE (return_reg) == PARALLEL)
+ emit_group_load (return_reg, val, type, int_size_in_bytes (type));
+ else
+ emit_move_insn (return_reg, val);
+ }
+
+ expand_null_return_1 ();
+}
+
+/* Generate RTL to evaluate the expression RETVAL and return it
+ from the current function. */
+
+static void
+expand_return (tree retval)
+{
+ rtx result_rtl;
+ rtx val = 0;
+ tree retval_rhs;
+
+ /* If function wants no value, give it none. */
+ if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
+ {
+ expand_normal (retval);
+ expand_null_return ();
+ return;
+ }
+
+ if (retval == error_mark_node)
+ {
+ /* Treat this like a return of no value from a function that
+ returns a value. */
+ expand_null_return ();
+ return;
+ }
+ else if ((TREE_CODE (retval) == MODIFY_EXPR
+ || TREE_CODE (retval) == INIT_EXPR)
+ && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
+ retval_rhs = TREE_OPERAND (retval, 1);
+ else
+ retval_rhs = retval;
+
+ result_rtl = DECL_RTL (DECL_RESULT (current_function_decl));
+
+ /* If we are returning the RESULT_DECL, then the value has already
+ been stored into it, so we don't have to do anything special. */
+ if (TREE_CODE (retval_rhs) == RESULT_DECL)
+ expand_value_return (result_rtl);
+
+ /* If the result is an aggregate that is being returned in one (or more)
+ registers, load the registers here. */
+
+ else if (retval_rhs != 0
+ && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
+ && REG_P (result_rtl))
+ {
+ val = copy_blkmode_to_reg (GET_MODE (result_rtl), retval_rhs);
+ if (val)
+ {
+ /* Use the mode of the result value on the return register. */
+ PUT_MODE (result_rtl, GET_MODE (val));
+ expand_value_return (val);
+ }
+ else
+ expand_null_return ();
+ }
+ else if (retval_rhs != 0
+ && !VOID_TYPE_P (TREE_TYPE (retval_rhs))
+ && (REG_P (result_rtl)
+ || (GET_CODE (result_rtl) == PARALLEL)))
+ {
+ /* Calculate the return value into a temporary (usually a pseudo
+ reg). */
+ tree ot = TREE_TYPE (DECL_RESULT (current_function_decl));
+ tree nt = build_qualified_type (ot, TYPE_QUALS (ot) | TYPE_QUAL_CONST);
+
+ val = assign_temp (nt, 0, 1);
+ val = expand_expr (retval_rhs, val, GET_MODE (val), EXPAND_NORMAL);
+ val = force_not_mem (val);
+ /* Return the calculated value. */
+ expand_value_return (val);
+ }
+ else
+ {
+ /* No hard reg used; calculate value into hard return reg. */
+ expand_expr (retval, const0_rtx, VOIDmode, EXPAND_NORMAL);
+ expand_value_return (result_rtl);
+ }
+}
+
/* A subroutine of expand_gimple_stmt, expanding one gimple statement
STMT that doesn't require special handling for outgoing edges. That
is no tailcalls and no GIMPLE_COND. */
@@ -4534,6 +5397,52 @@ expand_stack_alignment (void)
fixup_tail_calls ();
}
}
+
+
/* Arrange for __main to be invoked at the start of "main".  Only emitted
   on targets whose configuration requests it (INVOKE__main) or that lack
   any init-section mechanism for running static constructors.  */

static void
expand_main_function (void)
{
#if (defined(INVOKE__main) \
     || (!defined(HAS_INIT_SECTION) \
	 && !defined(INIT_SECTION_ASM_OP) \
	 && !defined(INIT_ARRAY_SECTION_ASM_OP)))
  emit_library_call (init_one_libfunc (NAME__MAIN), LCT_NORMAL, VOIDmode, 0);
#endif
}
+
+
/* Expand code to initialize the stack_protect_guard.  This is invoked at
   the beginning of a function to be protected.  */

/* Fallback definitions for targets without a stack_protect_set pattern;
   the gcc_unreachable keeps the dead call well-formed.  */
#ifndef HAVE_stack_protect_set
# define HAVE_stack_protect_set 0
# define gen_stack_protect_set(x,y) (gcc_unreachable (), NULL_RTX)
#endif

static void
stack_protect_prologue (void)
{
  tree guard_decl = targetm.stack_protect_guard ();
  rtx x, y;

  /* X is this frame's guard slot; Y is the guard value obtained from
     the target's stack_protect_guard hook.  */
  x = expand_normal (crtl->stack_protect_guard);
  y = expand_normal (guard_decl);

  /* Allow the target to copy from Y to X without leaking Y into a
     register.  */
  if (HAVE_stack_protect_set)
    {
      rtx insn = gen_stack_protect_set (x, y);
      if (insn)
	{
	  emit_insn (insn);
	  return;
	}
    }

  /* Otherwise do a straight move.  */
  emit_move_insn (x, y);
}
/* Translate the intermediate representation contained in the CFG
from GIMPLE trees to RTL.
@@ -277,6 +277,111 @@ clone_function_name (tree decl, const char *suffix)
return get_identifier (tmp_name);
}
/* Build variant of function type ORIG_TYPE skipping ARGS_TO_SKIP and the
   return value if SKIP_RETURN is true.  */

static tree
build_function_type_skip_args (tree orig_type, bitmap args_to_skip,
			       bool skip_return)
{
  tree new_type = NULL;
  tree args, new_args = NULL, t;
  tree new_reversed;
  int i = 0;

  /* Collect the argument types to keep, in reverse order.  I tracks the
     position in the original TYPE_ARG_TYPES list so it can be matched
     against ARGS_TO_SKIP.  */
  for (args = TYPE_ARG_TYPES (orig_type); args && args != void_list_node;
       args = TREE_CHAIN (args), i++)
    if (!args_to_skip || !bitmap_bit_p (args_to_skip, i))
      new_args = tree_cons (NULL_TREE, TREE_VALUE (args), new_args);

  new_reversed = nreverse (new_args);
  if (args)
    {
      /* The loop stopped on void_list_node, so the original list was
	 void-terminated; terminate the new one likewise.  After nreverse,
	 NEW_ARGS points at the *last* node of the reversed list.  */
      if (new_reversed)
	TREE_CHAIN (new_args) = void_list_node;
      else
	new_reversed = void_list_node;
    }

  /* Use copy_node to preserve as much as possible from original type
     (debug info, attribute lists etc.)
     Exception is METHOD_TYPEs must have THIS argument.
     When we are asked to remove it, we need to build new FUNCTION_TYPE
     instead.  */
  if (TREE_CODE (orig_type) != METHOD_TYPE
      || !args_to_skip
      || !bitmap_bit_p (args_to_skip, 0))
    {
      new_type = build_distinct_type_copy (orig_type);
      TYPE_ARG_TYPES (new_type) = new_reversed;
    }
  else
    {
      new_type
	= build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type),
							 new_reversed));
      TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type);
    }

  if (skip_return)
    TREE_TYPE (new_type) = void_type_node;

  /* This is a new type, not a copy of an old type.  Need to reassociate
     variants.  We can handle everything except the main variant lazily.  */
  t = TYPE_MAIN_VARIANT (orig_type);
  if (t != orig_type)
    {
      /* Recursively rebuild the main variant, then splice NEW_TYPE into
	 its variant chain right after it.  */
      t = build_function_type_skip_args (t, args_to_skip, skip_return);
      TYPE_MAIN_VARIANT (new_type) = t;
      TYPE_NEXT_VARIANT (new_type) = TYPE_NEXT_VARIANT (t);
      TYPE_NEXT_VARIANT (t) = new_type;
    }
  else
    {
      TYPE_MAIN_VARIANT (new_type) = new_type;
      TYPE_NEXT_VARIANT (new_type) = NULL;
    }

  return new_type;
}
+
/* Build variant of function decl ORIG_DECL skipping ARGS_TO_SKIP and the
   return value if SKIP_RETURN is true.

   Arguments from DECL_ARGUMENTS list can't be removed now, since they are
   linked by TREE_CHAIN directly.  The caller is responsible for eliminating
   them when they are being duplicated (i.e. copy_arguments_for_versioning).  */

static tree
build_function_decl_skip_args (tree orig_decl, bitmap args_to_skip,
			       bool skip_return)
{
  tree new_decl = copy_node (orig_decl);
  tree new_type;

  new_type = TREE_TYPE (orig_decl);
  /* Only rebuild the type when there is something to drop: either the
     type is prototyped (so argument skipping can apply) or a non-void
     return value is being removed.  */
  if (prototype_p (new_type)
      || (skip_return && !VOID_TYPE_P (TREE_TYPE (new_type))))
    new_type
      = build_function_type_skip_args (new_type, args_to_skip, skip_return);
  TREE_TYPE (new_decl) = new_type;

  /* For declarations setting DECL_VINDEX (i.e. methods)
     we expect first argument to be THIS pointer.   */
  if (args_to_skip && bitmap_bit_p (args_to_skip, 0))
    DECL_VINDEX (new_decl) = NULL_TREE;

  /* When signature changes, we need to clear builtin info.  */
  if (DECL_BUILT_IN (new_decl)
      && args_to_skip
      && !bitmap_empty_p (args_to_skip))
    {
      DECL_BUILT_IN_CLASS (new_decl) = NOT_BUILT_IN;
      DECL_FUNCTION_CODE (new_decl) = (enum built_in_function) 0;
    }
  return new_decl;
}
+
/* Create callgraph node clone with new declaration. The actual body will
be copied later at compilation stage.
@@ -3337,6 +3337,23 @@ merge_conversion_sequences (conversion *user_seq, conversion *std_seq)
return std_seq;
}
+/* Return the values of the elements of a CONSTRUCTOR as a vector of
+ trees. */
+
+static vec<tree, va_gc> *
+ctor_to_vec (tree ctor)
+{
+ vec<tree, va_gc> *vec;
+ vec_alloc (vec, CONSTRUCTOR_NELTS (ctor));
+ unsigned int ix;
+ tree val;
+
+ FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (ctor), ix, val)
+ vec->quick_push (val);
+
+ return vec;
+}
+
/* Handle overload resolution for initializing an object of class type from
an initializer list. First we look for a suitable constructor that
takes a std::initializer_list; if we don't find one, we then look for a
@@ -3649,6 +3649,19 @@ initialize_predefined_identifiers (void)
}
}
+/* Returns 1 if the target configuration supports defining public symbols
+ so that one of them will be chosen at link time instead of generating a
+ multiply-defined symbol error, whether through the use of weak symbols or
+ a target-specific mechanism for having duplicates discarded. */
+
+static bool
+supports_one_only (void)
+{
+ if (SUPPORTS_ONE_ONLY)
+ return true;
+ return TARGET_SUPPORTS_WEAK;
+}
+
/* Create the predefined scalar types of C,
and some nodes representing standard constants (0, 1, (void *)0).
Initialize the global binding level.
@@ -13829,6 +13842,23 @@ maybe_save_function_definition (tree fun)
register_constexpr_fundef (fun, DECL_SAVED_TREE (fun));
}
+
+/* Return true if ELEM is part of the chain CHAIN. */
+
+static bool
+chain_member (const_tree elem, const_tree chain)
+{
+ while (chain)
+ {
+ if (elem == chain)
+ return true;
+ chain = DECL_CHAIN (chain);
+ }
+
+ return false;
+}
+
+
/* Finish up a function declaration and compile that function
all the way to assembler language output. The free the storage
for the function definition.
@@ -235,6 +235,18 @@ eliminate_constant_term (rtx x, rtx *constptr)
return x;
}
+/* Returns a tree for the size of EXP in bytes. */
+
+static tree
+tree_expr_size (const_tree exp)
+{
+ if (DECL_P (exp)
+ && DECL_SIZE_UNIT (exp) != 0)
+ return DECL_SIZE_UNIT (exp);
+ else
+ return size_in_bytes (TREE_TYPE (exp));
+}
+
/* Return an rtx for the size in bytes of the value of EXP. */
rtx
@@ -4630,17 +4630,6 @@ mem_ref_refers_to_non_mem_p (tree ref)
return addr_expr_of_non_mem_decl_p_1 (base, false);
}
-/* Return TRUE iff OP is an ADDR_EXPR of a DECL that's not
- addressable. This is very much like mem_ref_refers_to_non_mem_p,
- but instead of the MEM_REF, it takes its base, and it doesn't
- assume a DECL is in memory just because its RTL is not set yet. */
-
-bool
-addr_expr_of_non_mem_decl_p (tree op)
-{
- return addr_expr_of_non_mem_decl_p_1 (op, true);
-}
-
/* Expand an assignment that stores the value of FROM into TO. If NONTEMPORAL
is true, try generating a nontemporal store. */
@@ -5744,6 +5733,23 @@ store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
store_field (target, bitsize, bitpos, 0, 0, mode, exp, alias_set, false);
}
+
+/* Returns the number of FIELD_DECLs in TYPE. */
+
+static int
+fields_length (const_tree type)
+{
+ tree t = TYPE_FIELDS (type);
+ int count = 0;
+
+ for (; t; t = DECL_CHAIN (t))
+ if (TREE_CODE (t) == FIELD_DECL)
+ ++count;
+
+ return count;
+}
+
+
/* Store the value of constructor EXP into the rtx TARGET.
TARGET is either a REG or a MEM; we know it cannot conflict, since
safe_from_p has been called.
@@ -8944,6 +8944,17 @@ pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
return total.low > (unsigned HOST_WIDE_INT) size;
}
+/* Return the HOST_WIDE_INT least significant bits of T, a sizetype
+ kind INTEGER_CST. This makes sure to properly sign-extend the
+ constant. */
+
+static HOST_WIDE_INT
+size_low_cst (const_tree t)
+{
+ double_int d = tree_to_double_int (t);
+ return d.sext (TYPE_PRECISION (TREE_TYPE (t))).low;
+}
+
/* Subroutine of fold_binary. This routine performs all of the
transformations that are common to the equality/inequality
operators (EQ_EXPR and NE_EXPR) and the ordering operators
@@ -9951,6 +9962,117 @@ mask_with_tz (tree type, double_int x, double_int y)
return x;
}
/* Return true when T is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

static bool
tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
{
  tree type = TREE_TYPE (t);
  enum tree_code code;

  /* Doing something useful for floating point would need more work.  */
  if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
    return false;

  code = TREE_CODE (t);
  /* First dispatch on the code's class ...  */
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_unary:
      return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
					 strict_overflow_p);
    case tcc_binary:
    case tcc_comparison:
      return tree_binary_nonzero_warnv_p (code, type,
					  TREE_OPERAND (t, 0),
					  TREE_OPERAND (t, 1),
					  strict_overflow_p);
    case tcc_constant:
    case tcc_declaration:
    case tcc_reference:
      return tree_single_nonzero_warnv_p (t, strict_overflow_p);

    default:
      break;
    }

  /* ... then on individual codes not covered by those classes.  */
  switch (code)
    {
    case TRUTH_NOT_EXPR:
      return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
					 strict_overflow_p);

    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
      return tree_binary_nonzero_warnv_p (code, type,
					  TREE_OPERAND (t, 0),
					  TREE_OPERAND (t, 1),
					  strict_overflow_p);

    case COND_EXPR:
    case CONSTRUCTOR:
    case OBJ_TYPE_REF:
    case ASSERT_EXPR:
    case ADDR_EXPR:
    case WITH_SIZE_EXPR:
    case SSA_NAME:
      return tree_single_nonzero_warnv_p (t, strict_overflow_p);

    case COMPOUND_EXPR:
    case MODIFY_EXPR:
    case BIND_EXPR:
      /* The value of these expressions is their second operand.  */
      return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
					strict_overflow_p);

    case SAVE_EXPR:
      return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
					strict_overflow_p);

    case CALL_EXPR:
      {
	tree fndecl = get_callee_fndecl (t);
	if (!fndecl) return false;
	/* A throwing operator new is assumed not to return NULL, unless
	   -fcheck-new is in effect or null-pointer-check deletion is
	   disabled.  */
	if (flag_delete_null_pointer_checks && !flag_check_new
	    && DECL_IS_OPERATOR_NEW (fndecl)
	    && !TREE_NOTHROW (fndecl))
	  return true;
	/* Likewise a function declared with the returns_nonnull
	   attribute.  */
	if (flag_delete_null_pointer_checks
	    && lookup_attribute ("returns_nonnull",
				 TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
	  return true;
	/* Results of alloca-like calls are treated as nonzero.  */
	return alloca_call_p (t);
      }

    default:
      break;
    }
  return false;
}
+
+/* Return true when T is an address and is known to be nonzero.
+ Handle warnings about undefined signed overflow. */
+
+static bool
+tree_expr_nonzero_p (tree t)
+{
+ bool ret, strict_overflow_p;
+
+ strict_overflow_p = false;
+ ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
+ if (strict_overflow_p)
+ fold_overflow_warning (("assuming signed overflow does not occur when "
+ "determining that expression is always "
+ "non-zero"),
+ WARN_STRICT_OVERFLOW_MISC);
+ return ret;
+}
+
/* Fold a binary expression of code CODE and type TYPE with operands
OP0 and OP1. LOC is the location of the resulting expression.
Return the folded expression if folding is successful. Otherwise,
@@ -15244,19 +15366,6 @@ fold_build2_initializer_loc (location_t loc, enum tree_code code,
}
tree
-fold_build3_initializer_loc (location_t loc, enum tree_code code,
- tree type, tree op0, tree op1, tree op2)
-{
- tree result;
- START_FOLD_INIT;
-
- result = fold_build3_loc (loc, code, type, op0, op1, op2);
-
- END_FOLD_INIT;
- return result;
-}
-
-tree
fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
int nargs, tree *argarray)
{
@@ -15777,7 +15886,7 @@ tree_call_nonnegative_warnv_p (tree type, tree fndecl,
set *STRICT_OVERFLOW_P to true; otherwise, don't change
*STRICT_OVERFLOW_P. */
-bool
+static bool
tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
{
enum tree_code code = TREE_CODE (t);
@@ -16146,117 +16255,6 @@ tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
return false;
}
-/* Return true when T is an address and is known to be nonzero.
- For floating point we further ensure that T is not denormal.
- Similar logic is present in nonzero_address in rtlanal.h.
-
- If the return value is based on the assumption that signed overflow
- is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
- change *STRICT_OVERFLOW_P. */
-
-bool
-tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
-{
- tree type = TREE_TYPE (t);
- enum tree_code code;
-
- /* Doing something useful for floating point would need more work. */
- if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
- return false;
-
- code = TREE_CODE (t);
- switch (TREE_CODE_CLASS (code))
- {
- case tcc_unary:
- return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
- strict_overflow_p);
- case tcc_binary:
- case tcc_comparison:
- return tree_binary_nonzero_warnv_p (code, type,
- TREE_OPERAND (t, 0),
- TREE_OPERAND (t, 1),
- strict_overflow_p);
- case tcc_constant:
- case tcc_declaration:
- case tcc_reference:
- return tree_single_nonzero_warnv_p (t, strict_overflow_p);
-
- default:
- break;
- }
-
- switch (code)
- {
- case TRUTH_NOT_EXPR:
- return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
- strict_overflow_p);
-
- case TRUTH_AND_EXPR:
- case TRUTH_OR_EXPR:
- case TRUTH_XOR_EXPR:
- return tree_binary_nonzero_warnv_p (code, type,
- TREE_OPERAND (t, 0),
- TREE_OPERAND (t, 1),
- strict_overflow_p);
-
- case COND_EXPR:
- case CONSTRUCTOR:
- case OBJ_TYPE_REF:
- case ASSERT_EXPR:
- case ADDR_EXPR:
- case WITH_SIZE_EXPR:
- case SSA_NAME:
- return tree_single_nonzero_warnv_p (t, strict_overflow_p);
-
- case COMPOUND_EXPR:
- case MODIFY_EXPR:
- case BIND_EXPR:
- return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
- strict_overflow_p);
-
- case SAVE_EXPR:
- return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
- strict_overflow_p);
-
- case CALL_EXPR:
- {
- tree fndecl = get_callee_fndecl (t);
- if (!fndecl) return false;
- if (flag_delete_null_pointer_checks && !flag_check_new
- && DECL_IS_OPERATOR_NEW (fndecl)
- && !TREE_NOTHROW (fndecl))
- return true;
- if (flag_delete_null_pointer_checks
- && lookup_attribute ("returns_nonnull",
- TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
- return true;
- return alloca_call_p (t);
- }
-
- default:
- break;
- }
- return false;
-}
-
-/* Return true when T is an address and is known to be nonzero.
- Handle warnings about undefined signed overflow. */
-
-bool
-tree_expr_nonzero_p (tree t)
-{
- bool ret, strict_overflow_p;
-
- strict_overflow_p = false;
- ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
- if (strict_overflow_p)
- fold_overflow_warning (("assuming signed overflow does not occur when "
- "determining that expression is always "
- "non-zero"),
- WARN_STRICT_OVERFLOW_MISC);
- return ret;
-}
-
/* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
attempt to fold the expression to a constant without modifying TYPE,
OP0 or OP1.
@@ -16272,21 +16270,6 @@ fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
}
-/* Given the components of a unary expression CODE, TYPE and OP0,
- attempt to fold the expression to a constant without modifying
- TYPE or OP0.
-
- If the expression could be simplified to a constant, then return
- the constant. If the expression would not be simplified to a
- constant, then return NULL_TREE. */
-
-tree
-fold_unary_to_constant (enum tree_code code, tree type, tree op0)
-{
- tree tem = fold_unary (code, type, op0);
- return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
-}
-
/* If EXP represents referencing an element in a constant string
(either via pointer arithmetic or array indexing), return the
tree representing the value accessed, otherwise return NULL. */
@@ -130,7 +130,6 @@ static bool contains (const_rtx, htab_t);
static void prepare_function_start (void);
static void do_clobber_return_reg (rtx, void *);
static void do_use_return_reg (rtx, void *);
-static void set_insn_locations (rtx, int) ATTRIBUTE_UNUSED;
/* Stack of nested functions. */
/* Keep track of the cfun stack. */
@@ -4644,51 +4643,6 @@ init_function_start (tree subr)
warning (OPT_Waggregate_return, "function returns an aggregate");
}
-
-void
-expand_main_function (void)
-{
-#if (defined(INVOKE__main) \
- || (!defined(HAS_INIT_SECTION) \
- && !defined(INIT_SECTION_ASM_OP) \
- && !defined(INIT_ARRAY_SECTION_ASM_OP)))
- emit_library_call (init_one_libfunc (NAME__MAIN), LCT_NORMAL, VOIDmode, 0);
-#endif
-}
-
-/* Expand code to initialize the stack_protect_guard. This is invoked at
- the beginning of a function to be protected. */
-
-#ifndef HAVE_stack_protect_set
-# define HAVE_stack_protect_set 0
-# define gen_stack_protect_set(x,y) (gcc_unreachable (), NULL_RTX)
-#endif
-
-void
-stack_protect_prologue (void)
-{
- tree guard_decl = targetm.stack_protect_guard ();
- rtx x, y;
-
- x = expand_normal (crtl->stack_protect_guard);
- y = expand_normal (guard_decl);
-
- /* Allow the target to copy from Y to X without leaking Y into a
- register. */
- if (HAVE_stack_protect_set)
- {
- rtx insn = gen_stack_protect_set (x, y);
- if (insn)
- {
- emit_insn (insn);
- return;
- }
- }
-
- /* Otherwise do a straight move. */
- emit_move_insn (x, y);
-}
-
/* Expand code to verify the stack_protect_guard. This is invoked at
the end of a function to be protected. */
@@ -5006,6 +4960,19 @@ do_warn_unused_parameter (tree fn)
warning (OPT_Wunused_parameter, "unused parameter %q+D", decl);
}
+/* Set the location of the insn chain starting at INSN to LOC. */
+
+static void
+set_insn_locations (rtx insn, int loc)
+{
+ while (insn != NULL_RTX)
+ {
+ if (INSN_P (insn))
+ INSN_LOCATION (insn) = loc;
+ insn = NEXT_INSN (insn);
+ }
+}
+
/* Generate RTL for the end of the current function. */
void
@@ -5335,18 +5302,6 @@ maybe_copy_prologue_epilogue_insn (rtx insn, rtx copy)
*slot = copy;
}
-/* Set the location of the insn chain starting at INSN to LOC. */
-static void
-set_insn_locations (rtx insn, int loc)
-{
- while (insn != NULL_RTX)
- {
- if (INSN_P (insn))
- INSN_LOCATION (insn) = loc;
- insn = NEXT_INSN (insn);
- }
-}
-
/* Determine if any INSNs in HASH are, or are part of, INSN. Because
we can be running after reorg, SEQUENCE rtl is possible. */
@@ -38,6 +38,7 @@ along with GCC; see the file COPYING3. If not see
#include "ipa-utils.h"
#include "gimple-pretty-print.h"
#include "tree-ssa-address.h"
+#include "langhooks.h"
/* Return true when DECL can be referenced from current unit.
FROM_DECL (if non-null) specify constructor of variable DECL was taken from.
@@ -1710,6 +1711,22 @@ and_var_with_comparison_1 (gimple stmt,
return NULL_TREE;
}
+/* If TYPE is a vector type, return a signed integer vector type with the
+ same width and number of subparts. Otherwise return boolean_type_node. */
+
+static tree
+truth_type_for (tree type)
+{
+ if (TREE_CODE (type) == VECTOR_TYPE)
+ {
+ tree elem = lang_hooks.types.type_for_size
+ (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))), 0);
+ return build_opaque_vector_type (elem, TYPE_VECTOR_SUBPARTS (type));
+ }
+ else
+ return boolean_type_node;
+}
+
/* Try to simplify the AND of two comparisons defined by
(OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
If this can be done without constructing an intermediate value,
@@ -723,6 +723,21 @@ add_interface (tree this_class, tree interface_class)
BINFO_BASE_APPEND (TYPE_BINFO (this_class), interface_binfo);
}
+/* Construct, lay out and return the type of methods belonging to class
+ BASETYPE and whose arguments and values are described by TYPE.
+ If that type exists already, reuse it.
+ TYPE must be a FUNCTION_TYPE node. */
+
+static tree
+build_method_type (tree basetype, tree type)
+{
+ gcc_assert (TREE_CODE (type) == FUNCTION_TYPE);
+
+ return build_method_type_directly (basetype,
+ TREE_TYPE (type),
+ TYPE_ARG_TYPES (type));
+}
+
static tree
build_java_method_type (tree fntype, tree this_class, int access_flags)
{
@@ -960,30 +960,6 @@ print_node (FILE *file, const char *prefix, tree node, int indent)
fprintf (file, ">");
}
-/* Print the tree vector VEC in full on file FILE, preceded by PREFIX,
- starting in column INDENT. */
-
-void
-print_vec_tree (FILE *file, const char *prefix, vec<tree, va_gc> *vec, int indent)
-{
- tree elt;
- unsigned ix;
-
- /* Indent to the specified column, since this is the long form. */
- indent_to (file, indent);
-
- /* Print the slot this node is in, and its code, and address. */
- fprintf (file, "%s <VEC", prefix);
- dump_addr (file, " ", vec->address ());
-
- FOR_EACH_VEC_ELT (*vec, ix, elt)
- {
- char temp[10];
- sprintf (temp, "elt %d", ix);
- print_node (file, temp, elt, indent + 4);
- }
-}
-
/* Print the node NODE on standard error, for debugging.
Most nodes referred to by this one are printed recursively
@@ -102,13 +102,8 @@ typedef struct case_node *case_node_ptr;
extern basic_block label_to_block_fn (struct function *, tree);
-static int n_occurrences (int, const char *);
-static bool tree_conflicts_with_clobbers_p (tree, HARD_REG_SET *);
-static bool check_operand_nalternatives (tree, tree);
static bool check_unique_operand_names (tree, tree, tree);
static char *resolve_operand_name_1 (char *, tree, tree, tree);
-static void expand_null_return_1 (void);
-static void expand_value_return (rtx);
static void balance_case_nodes (case_node_ptr *, case_node_ptr);
static int node_has_low_bound (case_node_ptr, tree);
static int node_has_high_bound (case_node_ptr, tree);
@@ -157,20 +152,6 @@ emit_jump (rtx label)
emit_jump_insn (gen_jump (label));
emit_barrier ();
}
-
-/* Emit code to jump to the address
- specified by the pointer expression EXP. */
-
-void
-expand_computed_goto (tree exp)
-{
- rtx x = expand_normal (exp);
-
- x = convert_memory_address (Pmode, x);
-
- do_pending_stack_adjust ();
- emit_indirect_jump (x);
-}
/* Handle goto statements and the labels that they can go to. */
@@ -209,56 +190,7 @@ expand_label (tree label)
if (DECL_NONLOCAL (label) || FORCED_LABEL (label))
maybe_set_first_label_num (label_r);
}
-
-/* Generate RTL code for a `goto' statement with target label LABEL.
- LABEL should be a LABEL_DECL tree node that was or will later be
- defined with `expand_label'. */
-
-void
-expand_goto (tree label)
-{
-#ifdef ENABLE_CHECKING
- /* Check for a nonlocal goto to a containing function. Should have
- gotten translated to __builtin_nonlocal_goto. */
- tree context = decl_function_context (label);
- gcc_assert (!context || context == current_function_decl);
-#endif
-
- emit_jump (label_rtx (label));
-}
-
-/* Return the number of times character C occurs in string S. */
-static int
-n_occurrences (int c, const char *s)
-{
- int n = 0;
- while (*s)
- n += (*s++ == c);
- return n;
-}
-/* Generate RTL for an asm statement (explicit assembler code).
- STRING is a STRING_CST node containing the assembler code text,
- or an ADDR_EXPR containing a STRING_CST. VOL nonzero means the
- insn is volatile; don't optimize it. */
-
-static void
-expand_asm_loc (tree string, int vol, location_t locus)
-{
- rtx body;
-
- if (TREE_CODE (string) == ADDR_EXPR)
- string = TREE_OPERAND (string, 0);
-
- body = gen_rtx_ASM_INPUT_loc (VOIDmode,
- ggc_strdup (TREE_STRING_POINTER (string)),
- locus);
-
- MEM_VOLATILE_P (body) = vol;
-
- emit_insn (body);
-}
-
/* Parse the output constraint pointed to by *CONSTRAINT_P. It is the
OPERAND_NUMth output operand, indexed from zero. There are NINPUTS
inputs and NOUTPUTS outputs to this extended-asm. Upon return,
@@ -577,663 +509,6 @@ tree_overlaps_hard_reg_set (tree decl, HARD_REG_SET *regs)
return walk_tree (&decl, decl_overlaps_hard_reg_set_p, regs, NULL);
}
-/* Check for overlap between registers marked in CLOBBERED_REGS and
- anything inappropriate in T. Emit error and return the register
- variable definition for error, NULL_TREE for ok. */
-
-static bool
-tree_conflicts_with_clobbers_p (tree t, HARD_REG_SET *clobbered_regs)
-{
- /* Conflicts between asm-declared register variables and the clobber
- list are not allowed. */
- tree overlap = tree_overlaps_hard_reg_set (t, clobbered_regs);
-
- if (overlap)
- {
- error ("asm-specifier for variable %qE conflicts with asm clobber list",
- DECL_NAME (overlap));
-
- /* Reset registerness to stop multiple errors emitted for a single
- variable. */
- DECL_REGISTER (overlap) = 0;
- return true;
- }
-
- return false;
-}
-
-/* Generate RTL for an asm statement with arguments.
- STRING is the instruction template.
- OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
- Each output or input has an expression in the TREE_VALUE and
- a tree list in TREE_PURPOSE which in turn contains a constraint
- name in TREE_VALUE (or NULL_TREE) and a constraint string
- in TREE_PURPOSE.
- CLOBBERS is a list of STRING_CST nodes each naming a hard register
- that is clobbered by this insn.
-
- LABELS is a list of labels, and if LABELS is non-NULL, FALLTHRU_BB
- should be the fallthru basic block of the asm goto.
-
- Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
- Some elements of OUTPUTS may be replaced with trees representing temporary
- values. The caller should copy those temporary values to the originally
- specified lvalues.
-
- VOL nonzero means the insn is volatile; don't optimize it. */
-
-static void
-expand_asm_operands (tree string, tree outputs, tree inputs,
- tree clobbers, tree labels, basic_block fallthru_bb,
- int vol, location_t locus)
-{
- rtvec argvec, constraintvec, labelvec;
- rtx body;
- int ninputs = list_length (inputs);
- int noutputs = list_length (outputs);
- int nlabels = list_length (labels);
- int ninout;
- int nclobbers;
- HARD_REG_SET clobbered_regs;
- int clobber_conflict_found = 0;
- tree tail;
- tree t;
- int i;
- /* Vector of RTX's of evaluated output operands. */
- rtx *output_rtx = XALLOCAVEC (rtx, noutputs);
- int *inout_opnum = XALLOCAVEC (int, noutputs);
- rtx *real_output_rtx = XALLOCAVEC (rtx, noutputs);
- enum machine_mode *inout_mode = XALLOCAVEC (enum machine_mode, noutputs);
- const char **constraints = XALLOCAVEC (const char *, noutputs + ninputs);
- int old_generating_concat_p = generating_concat_p;
- rtx fallthru_label = NULL_RTX;
-
- /* An ASM with no outputs needs to be treated as volatile, for now. */
- if (noutputs == 0)
- vol = 1;
-
- if (! check_operand_nalternatives (outputs, inputs))
- return;
-
- string = resolve_asm_operand_names (string, outputs, inputs, labels);
-
- /* Collect constraints. */
- i = 0;
- for (t = outputs; t ; t = TREE_CHAIN (t), i++)
- constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
- for (t = inputs; t ; t = TREE_CHAIN (t), i++)
- constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
-
- /* Sometimes we wish to automatically clobber registers across an asm.
- Case in point is when the i386 backend moved from cc0 to a hard reg --
- maintaining source-level compatibility means automatically clobbering
- the flags register. */
- clobbers = targetm.md_asm_clobbers (outputs, inputs, clobbers);
-
- /* Count the number of meaningful clobbered registers, ignoring what
- we would ignore later. */
- nclobbers = 0;
- CLEAR_HARD_REG_SET (clobbered_regs);
- for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
- {
- const char *regname;
- int nregs;
-
- if (TREE_VALUE (tail) == error_mark_node)
- return;
- regname = TREE_STRING_POINTER (TREE_VALUE (tail));
-
- i = decode_reg_name_and_count (regname, &nregs);
- if (i == -4)
- ++nclobbers;
- else if (i == -2)
- error ("unknown register name %qs in %<asm%>", regname);
-
- /* Mark clobbered registers. */
- if (i >= 0)
- {
- int reg;
-
- for (reg = i; reg < i + nregs; reg++)
- {
- ++nclobbers;
-
- /* Clobbering the PIC register is an error. */
- if (reg == (int) PIC_OFFSET_TABLE_REGNUM)
- {
- error ("PIC register clobbered by %qs in %<asm%>", regname);
- return;
- }
-
- SET_HARD_REG_BIT (clobbered_regs, reg);
- }
- }
- }
-
- /* First pass over inputs and outputs checks validity and sets
- mark_addressable if needed. */
-
- ninout = 0;
- for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
- {
- tree val = TREE_VALUE (tail);
- tree type = TREE_TYPE (val);
- const char *constraint;
- bool is_inout;
- bool allows_reg;
- bool allows_mem;
-
- /* If there's an erroneous arg, emit no insn. */
- if (type == error_mark_node)
- return;
-
- /* Try to parse the output constraint. If that fails, there's
- no point in going further. */
- constraint = constraints[i];
- if (!parse_output_constraint (&constraint, i, ninputs, noutputs,
- &allows_mem, &allows_reg, &is_inout))
- return;
-
- if (! allows_reg
- && (allows_mem
- || is_inout
- || (DECL_P (val)
- && REG_P (DECL_RTL (val))
- && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type))))
- mark_addressable (val);
-
- if (is_inout)
- ninout++;
- }
-
- ninputs += ninout;
- if (ninputs + noutputs > MAX_RECOG_OPERANDS)
- {
- error ("more than %d operands in %<asm%>", MAX_RECOG_OPERANDS);
- return;
- }
-
- for (i = 0, tail = inputs; tail; i++, tail = TREE_CHAIN (tail))
- {
- bool allows_reg, allows_mem;
- const char *constraint;
-
- /* If there's an erroneous arg, emit no insn, because the ASM_INPUT
- would get VOIDmode and that could cause a crash in reload. */
- if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)
- return;
-
- constraint = constraints[i + noutputs];
- if (! parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
- constraints, &allows_mem, &allows_reg))
- return;
-
- if (! allows_reg && allows_mem)
- mark_addressable (TREE_VALUE (tail));
- }
-
- /* Second pass evaluates arguments. */
-
- /* Make sure stack is consistent for asm goto. */
- if (nlabels > 0)
- do_pending_stack_adjust ();
-
- ninout = 0;
- for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
- {
- tree val = TREE_VALUE (tail);
- tree type = TREE_TYPE (val);
- bool is_inout;
- bool allows_reg;
- bool allows_mem;
- rtx op;
- bool ok;
-
- ok = parse_output_constraint (&constraints[i], i, ninputs,
- noutputs, &allows_mem, &allows_reg,
- &is_inout);
- gcc_assert (ok);
-
- /* If an output operand is not a decl or indirect ref and our constraint
- allows a register, make a temporary to act as an intermediate.
- Make the asm insn write into that, then our caller will copy it to
- the real output operand. Likewise for promoted variables. */
-
- generating_concat_p = 0;
-
- real_output_rtx[i] = NULL_RTX;
- if ((TREE_CODE (val) == INDIRECT_REF
- && allows_mem)
- || (DECL_P (val)
- && (allows_mem || REG_P (DECL_RTL (val)))
- && ! (REG_P (DECL_RTL (val))
- && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
- || ! allows_reg
- || is_inout)
- {
- op = expand_expr (val, NULL_RTX, VOIDmode,
- !allows_reg ? EXPAND_MEMORY : EXPAND_WRITE);
- if (MEM_P (op))
- op = validize_mem (op);
-
- if (! allows_reg && !MEM_P (op))
- error ("output number %d not directly addressable", i);
- if ((! allows_mem && MEM_P (op))
- || GET_CODE (op) == CONCAT)
- {
- real_output_rtx[i] = op;
- op = gen_reg_rtx (GET_MODE (op));
- if (is_inout)
- emit_move_insn (op, real_output_rtx[i]);
- }
- }
- else
- {
- op = assign_temp (type, 0, 1);
- op = validize_mem (op);
- if (!MEM_P (op) && TREE_CODE (TREE_VALUE (tail)) == SSA_NAME)
- set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (TREE_VALUE (tail)), op);
- TREE_VALUE (tail) = make_tree (type, op);
- }
- output_rtx[i] = op;
-
- generating_concat_p = old_generating_concat_p;
-
- if (is_inout)
- {
- inout_mode[ninout] = TYPE_MODE (type);
- inout_opnum[ninout++] = i;
- }
-
- if (tree_conflicts_with_clobbers_p (val, &clobbered_regs))
- clobber_conflict_found = 1;
- }
-
- /* Make vectors for the expression-rtx, constraint strings,
- and named operands. */
-
- argvec = rtvec_alloc (ninputs);
- constraintvec = rtvec_alloc (ninputs);
- labelvec = rtvec_alloc (nlabels);
-
- body = gen_rtx_ASM_OPERANDS ((noutputs == 0 ? VOIDmode
- : GET_MODE (output_rtx[0])),
- ggc_strdup (TREE_STRING_POINTER (string)),
- empty_string, 0, argvec, constraintvec,
- labelvec, locus);
-
- MEM_VOLATILE_P (body) = vol;
-
- /* Eval the inputs and put them into ARGVEC.
- Put their constraints into ASM_INPUTs and store in CONSTRAINTS. */
-
- for (i = 0, tail = inputs; tail; tail = TREE_CHAIN (tail), ++i)
- {
- bool allows_reg, allows_mem;
- const char *constraint;
- tree val, type;
- rtx op;
- bool ok;
-
- constraint = constraints[i + noutputs];
- ok = parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
- constraints, &allows_mem, &allows_reg);
- gcc_assert (ok);
-
- generating_concat_p = 0;
-
- val = TREE_VALUE (tail);
- type = TREE_TYPE (val);
- /* EXPAND_INITIALIZER will not generate code for valid initializer
- constants, but will still generate code for other types of operand.
- This is the behavior we want for constant constraints. */
- op = expand_expr (val, NULL_RTX, VOIDmode,
- allows_reg ? EXPAND_NORMAL
- : allows_mem ? EXPAND_MEMORY
- : EXPAND_INITIALIZER);
-
- /* Never pass a CONCAT to an ASM. */
- if (GET_CODE (op) == CONCAT)
- op = force_reg (GET_MODE (op), op);
- else if (MEM_P (op))
- op = validize_mem (op);
-
- if (asm_operand_ok (op, constraint, NULL) <= 0)
- {
- if (allows_reg && TYPE_MODE (type) != BLKmode)
- op = force_reg (TYPE_MODE (type), op);
- else if (!allows_mem)
- warning (0, "asm operand %d probably doesn%'t match constraints",
- i + noutputs);
- else if (MEM_P (op))
- {
- /* We won't recognize either volatile memory or memory
- with a queued address as available a memory_operand
- at this point. Ignore it: clearly this *is* a memory. */
- }
- else
- gcc_unreachable ();
- }
-
- generating_concat_p = old_generating_concat_p;
- ASM_OPERANDS_INPUT (body, i) = op;
-
- ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, i)
- = gen_rtx_ASM_INPUT (TYPE_MODE (type),
- ggc_strdup (constraints[i + noutputs]));
-
- if (tree_conflicts_with_clobbers_p (val, &clobbered_regs))
- clobber_conflict_found = 1;
- }
-
- /* Protect all the operands from the queue now that they have all been
- evaluated. */
-
- generating_concat_p = 0;
-
- /* For in-out operands, copy output rtx to input rtx. */
- for (i = 0; i < ninout; i++)
- {
- int j = inout_opnum[i];
- char buffer[16];
-
- ASM_OPERANDS_INPUT (body, ninputs - ninout + i)
- = output_rtx[j];
-
- sprintf (buffer, "%d", j);
- ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, ninputs - ninout + i)
- = gen_rtx_ASM_INPUT (inout_mode[i], ggc_strdup (buffer));
- }
-
- /* Copy labels to the vector. */
- for (i = 0, tail = labels; i < nlabels; ++i, tail = TREE_CHAIN (tail))
- {
- rtx r;
- /* If asm goto has any labels in the fallthru basic block, use
- a label that we emit immediately after the asm goto. Expansion
- may insert further instructions into the same basic block after
- asm goto and if we don't do this, insertion of instructions on
- the fallthru edge might misbehave. See PR58670. */
- if (fallthru_bb
- && label_to_block_fn (cfun, TREE_VALUE (tail)) == fallthru_bb)
- {
- if (fallthru_label == NULL_RTX)
- fallthru_label = gen_label_rtx ();
- r = fallthru_label;
- }
- else
- r = label_rtx (TREE_VALUE (tail));
- ASM_OPERANDS_LABEL (body, i) = gen_rtx_LABEL_REF (Pmode, r);
- }
-
- generating_concat_p = old_generating_concat_p;
-
- /* Now, for each output, construct an rtx
- (set OUTPUT (asm_operands INSN OUTPUTCONSTRAINT OUTPUTNUMBER
- ARGVEC CONSTRAINTS OPNAMES))
- If there is more than one, put them inside a PARALLEL. */
-
- if (nlabels > 0 && nclobbers == 0)
- {
- gcc_assert (noutputs == 0);
- emit_jump_insn (body);
- }
- else if (noutputs == 0 && nclobbers == 0)
- {
- /* No output operands: put in a raw ASM_OPERANDS rtx. */
- emit_insn (body);
- }
- else if (noutputs == 1 && nclobbers == 0)
- {
- ASM_OPERANDS_OUTPUT_CONSTRAINT (body) = ggc_strdup (constraints[0]);
- emit_insn (gen_rtx_SET (VOIDmode, output_rtx[0], body));
- }
- else
- {
- rtx obody = body;
- int num = noutputs;
-
- if (num == 0)
- num = 1;
-
- body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num + nclobbers));
-
- /* For each output operand, store a SET. */
- for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
- {
- XVECEXP (body, 0, i)
- = gen_rtx_SET (VOIDmode,
- output_rtx[i],
- gen_rtx_ASM_OPERANDS
- (GET_MODE (output_rtx[i]),
- ggc_strdup (TREE_STRING_POINTER (string)),
- ggc_strdup (constraints[i]),
- i, argvec, constraintvec, labelvec, locus));
-
- MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;
- }
-
- /* If there are no outputs (but there are some clobbers)
- store the bare ASM_OPERANDS into the PARALLEL. */
-
- if (i == 0)
- XVECEXP (body, 0, i++) = obody;
-
- /* Store (clobber REG) for each clobbered register specified. */
-
- for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
- {
- const char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
- int reg, nregs;
- int j = decode_reg_name_and_count (regname, &nregs);
- rtx clobbered_reg;
-
- if (j < 0)
- {
- if (j == -3) /* `cc', which is not a register */
- continue;
-
- if (j == -4) /* `memory', don't cache memory across asm */
- {
- XVECEXP (body, 0, i++)
- = gen_rtx_CLOBBER (VOIDmode,
- gen_rtx_MEM
- (BLKmode,
- gen_rtx_SCRATCH (VOIDmode)));
- continue;
- }
-
- /* Ignore unknown register, error already signaled. */
- continue;
- }
-
- for (reg = j; reg < j + nregs; reg++)
- {
- /* Use QImode since that's guaranteed to clobber just
- * one reg. */
- clobbered_reg = gen_rtx_REG (QImode, reg);
-
- /* Do sanity check for overlap between clobbers and
- respectively input and outputs that hasn't been
- handled. Such overlap should have been detected and
- reported above. */
- if (!clobber_conflict_found)
- {
- int opno;
-
- /* We test the old body (obody) contents to avoid
- tripping over the under-construction body. */
- for (opno = 0; opno < noutputs; opno++)
- if (reg_overlap_mentioned_p (clobbered_reg,
- output_rtx[opno]))
- internal_error
- ("asm clobber conflict with output operand");
-
- for (opno = 0; opno < ninputs - ninout; opno++)
- if (reg_overlap_mentioned_p (clobbered_reg,
- ASM_OPERANDS_INPUT (obody,
- opno)))
- internal_error
- ("asm clobber conflict with input operand");
- }
-
- XVECEXP (body, 0, i++)
- = gen_rtx_CLOBBER (VOIDmode, clobbered_reg);
- }
- }
-
- if (nlabels > 0)
- emit_jump_insn (body);
- else
- emit_insn (body);
- }
-
- if (fallthru_label)
- emit_label (fallthru_label);
-
- /* For any outputs that needed reloading into registers, spill them
- back to where they belong. */
- for (i = 0; i < noutputs; ++i)
- if (real_output_rtx[i])
- emit_move_insn (real_output_rtx[i], output_rtx[i]);
-
- crtl->has_asm_statement = 1;
- free_temp_slots ();
-}
-
-void
-expand_asm_stmt (gimple stmt)
-{
- int noutputs;
- tree outputs, tail, t;
- tree *o;
- size_t i, n;
- const char *s;
- tree str, out, in, cl, labels;
- location_t locus = gimple_location (stmt);
- basic_block fallthru_bb = NULL;
-
- /* Meh... convert the gimple asm operands into real tree lists.
- Eventually we should make all routines work on the vectors instead
- of relying on TREE_CHAIN. */
- out = NULL_TREE;
- n = gimple_asm_noutputs (stmt);
- if (n > 0)
- {
- t = out = gimple_asm_output_op (stmt, 0);
- for (i = 1; i < n; i++)
- t = TREE_CHAIN (t) = gimple_asm_output_op (stmt, i);
- }
-
- in = NULL_TREE;
- n = gimple_asm_ninputs (stmt);
- if (n > 0)
- {
- t = in = gimple_asm_input_op (stmt, 0);
- for (i = 1; i < n; i++)
- t = TREE_CHAIN (t) = gimple_asm_input_op (stmt, i);
- }
-
- cl = NULL_TREE;
- n = gimple_asm_nclobbers (stmt);
- if (n > 0)
- {
- t = cl = gimple_asm_clobber_op (stmt, 0);
- for (i = 1; i < n; i++)
- t = TREE_CHAIN (t) = gimple_asm_clobber_op (stmt, i);
- }
-
- labels = NULL_TREE;
- n = gimple_asm_nlabels (stmt);
- if (n > 0)
- {
- edge fallthru = find_fallthru_edge (gimple_bb (stmt)->succs);
- if (fallthru)
- fallthru_bb = fallthru->dest;
- t = labels = gimple_asm_label_op (stmt, 0);
- for (i = 1; i < n; i++)
- t = TREE_CHAIN (t) = gimple_asm_label_op (stmt, i);
- }
-
- s = gimple_asm_string (stmt);
- str = build_string (strlen (s), s);
-
- if (gimple_asm_input_p (stmt))
- {
- expand_asm_loc (str, gimple_asm_volatile_p (stmt), locus);
- return;
- }
-
- outputs = out;
- noutputs = gimple_asm_noutputs (stmt);
- /* o[I] is the place that output number I should be written. */
- o = (tree *) alloca (noutputs * sizeof (tree));
-
- /* Record the contents of OUTPUTS before it is modified. */
- for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
- o[i] = TREE_VALUE (tail);
-
- /* Generate the ASM_OPERANDS insn; store into the TREE_VALUEs of
- OUTPUTS some trees for where the values were actually stored. */
- expand_asm_operands (str, outputs, in, cl, labels, fallthru_bb,
- gimple_asm_volatile_p (stmt), locus);
-
- /* Copy all the intermediate outputs into the specified outputs. */
- for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
- {
- if (o[i] != TREE_VALUE (tail))
- {
- expand_assignment (o[i], TREE_VALUE (tail), false);
- free_temp_slots ();
-
- /* Restore the original value so that it's correct the next
- time we expand this function. */
- TREE_VALUE (tail) = o[i];
- }
- }
-}
-
-/* A subroutine of expand_asm_operands. Check that all operands have
- the same number of alternatives. Return true if so. */
-
-static bool
-check_operand_nalternatives (tree outputs, tree inputs)
-{
- if (outputs || inputs)
- {
- tree tmp = TREE_PURPOSE (outputs ? outputs : inputs);
- int nalternatives
- = n_occurrences (',', TREE_STRING_POINTER (TREE_VALUE (tmp)));
- tree next = inputs;
-
- if (nalternatives + 1 > MAX_RECOG_ALTERNATIVES)
- {
- error ("too many alternatives in %<asm%>");
- return false;
- }
-
- tmp = outputs;
- while (tmp)
- {
- const char *constraint
- = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (tmp)));
-
- if (n_occurrences (',', constraint) != nalternatives)
- {
- error ("operand constraints for %<asm%> differ "
- "in number of alternatives");
- return false;
- }
-
- if (TREE_CHAIN (tmp))
- tmp = TREE_CHAIN (tmp);
- else
- tmp = next, next = 0;
- }
- }
-
- return true;
-}
/* A subroutine of expand_asm_operands. Check that all operand names
are unique. Return true if so. We rely on the fact that these names
@@ -1427,19 +702,6 @@ resolve_operand_name_1 (char *p, tree outputs, tree inputs, tree labels)
return p;
}
-/* Generate RTL to return from the current function, with no value.
- (That is, we do not do anything about returning any value.) */
-
-void
-expand_null_return (void)
-{
- /* If this function was declared to return a value, but we
- didn't, clobber the return registers so that they are not
- propagated live to the rest of the function. */
- clobber_return_register ();
-
- expand_null_return_1 ();
-}
/* Generate RTL to return directly from the current function.
(That is, we bypass any return value.) */
@@ -1459,154 +721,6 @@ expand_naked_return (void)
emit_jump (end_label);
}
-/* Generate RTL to return from the current function, with value VAL. */
-
-static void
-expand_value_return (rtx val)
-{
- /* Copy the value to the return location unless it's already there. */
-
- tree decl = DECL_RESULT (current_function_decl);
- rtx return_reg = DECL_RTL (decl);
- if (return_reg != val)
- {
- tree funtype = TREE_TYPE (current_function_decl);
- tree type = TREE_TYPE (decl);
- int unsignedp = TYPE_UNSIGNED (type);
- enum machine_mode old_mode = DECL_MODE (decl);
- enum machine_mode mode;
- if (DECL_BY_REFERENCE (decl))
- mode = promote_function_mode (type, old_mode, &unsignedp, funtype, 2);
- else
- mode = promote_function_mode (type, old_mode, &unsignedp, funtype, 1);
-
- if (mode != old_mode)
- val = convert_modes (mode, old_mode, val, unsignedp);
-
- if (GET_CODE (return_reg) == PARALLEL)
- emit_group_load (return_reg, val, type, int_size_in_bytes (type));
- else
- emit_move_insn (return_reg, val);
- }
-
- expand_null_return_1 ();
-}
-
-/* Output a return with no value. */
-
-static void
-expand_null_return_1 (void)
-{
- clear_pending_stack_adjust ();
- do_pending_stack_adjust ();
- emit_jump (return_label);
-}
-
-/* Generate RTL to evaluate the expression RETVAL and return it
- from the current function. */
-
-void
-expand_return (tree retval)
-{
- rtx result_rtl;
- rtx val = 0;
- tree retval_rhs;
-
- /* If function wants no value, give it none. */
- if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
- {
- expand_normal (retval);
- expand_null_return ();
- return;
- }
-
- if (retval == error_mark_node)
- {
- /* Treat this like a return of no value from a function that
- returns a value. */
- expand_null_return ();
- return;
- }
- else if ((TREE_CODE (retval) == MODIFY_EXPR
- || TREE_CODE (retval) == INIT_EXPR)
- && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
- retval_rhs = TREE_OPERAND (retval, 1);
- else
- retval_rhs = retval;
-
- result_rtl = DECL_RTL (DECL_RESULT (current_function_decl));
-
- /* If we are returning the RESULT_DECL, then the value has already
- been stored into it, so we don't have to do anything special. */
- if (TREE_CODE (retval_rhs) == RESULT_DECL)
- expand_value_return (result_rtl);
-
- /* If the result is an aggregate that is being returned in one (or more)
- registers, load the registers here. */
-
- else if (retval_rhs != 0
- && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
- && REG_P (result_rtl))
- {
- val = copy_blkmode_to_reg (GET_MODE (result_rtl), retval_rhs);
- if (val)
- {
- /* Use the mode of the result value on the return register. */
- PUT_MODE (result_rtl, GET_MODE (val));
- expand_value_return (val);
- }
- else
- expand_null_return ();
- }
- else if (retval_rhs != 0
- && !VOID_TYPE_P (TREE_TYPE (retval_rhs))
- && (REG_P (result_rtl)
- || (GET_CODE (result_rtl) == PARALLEL)))
- {
- /* Calculate the return value into a temporary (usually a pseudo
- reg). */
- tree ot = TREE_TYPE (DECL_RESULT (current_function_decl));
- tree nt = build_qualified_type (ot, TYPE_QUALS (ot) | TYPE_QUAL_CONST);
-
- val = assign_temp (nt, 0, 1);
- val = expand_expr (retval_rhs, val, GET_MODE (val), EXPAND_NORMAL);
- val = force_not_mem (val);
- /* Return the calculated value. */
- expand_value_return (val);
- }
- else
- {
- /* No hard reg used; calculate value into hard return reg. */
- expand_expr (retval, const0_rtx, VOIDmode, EXPAND_NORMAL);
- expand_value_return (result_rtl);
- }
-}
-
-
-/* Emit code to save the current value of stack. */
-rtx
-expand_stack_save (void)
-{
- rtx ret = NULL_RTX;
-
- do_pending_stack_adjust ();
- emit_stack_save (SAVE_BLOCK, &ret);
- return ret;
-}
-
-/* Emit code to restore the current value of stack. */
-void
-expand_stack_restore (tree var)
-{
- rtx prev, sa = expand_normal (var);
-
- sa = convert_memory_address (Pmode, sa);
-
- prev = get_last_insn ();
- emit_stack_restore (SAVE_BLOCK, sa);
- fixup_args_size_notes (prev, get_last_insn (), 0);
-}
-
/* Generate code to jump to LABEL if OP0 and OP1 are equal in mode MODE. PROB
is the probability of jumping to LABEL. */
static void
@@ -33,6 +33,7 @@ along with GCC; see the file COPYING3. If not see
#include "timevar.h"
#include "lto-streamer.h"
#include "rtl.h"
+#include "output.h"
const char * const ld_plugin_symbol_resolution_names[]=
{
@@ -81,6 +82,28 @@ eq_node (const void *p1, const void *p2)
return DECL_UID (n1->decl) == DECL_UID (n2->decl);
}
+/* Hash asmnames ignoring the user specified marks. */
+
+static hashval_t
+decl_assembler_name_hash (const_tree asmname)
+{
+ if (IDENTIFIER_POINTER (asmname)[0] == '*')
+ {
+ const char *decl_str = IDENTIFIER_POINTER (asmname) + 1;
+ size_t ulp_len = strlen (user_label_prefix);
+
+ if (ulp_len == 0)
+ ;
+ else if (strncmp (decl_str, user_label_prefix, ulp_len) == 0)
+ decl_str += ulp_len;
+
+ return htab_hash_string (decl_str);
+ }
+
+ return htab_hash_string (IDENTIFIER_POINTER (asmname));
+}
+
+
/* Returns a hash code for P. */
static hashval_t
@@ -90,6 +113,62 @@ hash_node_by_assembler_name (const void *p)
return (hashval_t) decl_assembler_name_hash (DECL_ASSEMBLER_NAME (n->decl));
}
+/* Compare ASMNAME with the DECL_ASSEMBLER_NAME of DECL. */
+
+static bool
+decl_assembler_name_equal (tree decl, const_tree asmname)
+{
+ tree decl_asmname = DECL_ASSEMBLER_NAME (decl);
+ const char *decl_str;
+ const char *asmname_str;
+ bool test = false;
+
+ if (decl_asmname == asmname)
+ return true;
+
+ decl_str = IDENTIFIER_POINTER (decl_asmname);
+ asmname_str = IDENTIFIER_POINTER (asmname);
+
+
+ /* If the target assembler name was set by the user, things are trickier.
+ We have a leading '*' to begin with. After that, it's arguable what
+ is the correct thing to do with -fleading-underscore. Arguably, we've
+ historically been doing the wrong thing in assemble_alias by always
+ printing the leading underscore. Since we're not changing that, make
+ sure user_label_prefix follows the '*' before matching. */
+ if (decl_str[0] == '*')
+ {
+ size_t ulp_len = strlen (user_label_prefix);
+
+ decl_str ++;
+
+ if (ulp_len == 0)
+ test = true;
+ else if (strncmp (decl_str, user_label_prefix, ulp_len) == 0)
+ decl_str += ulp_len, test=true;
+ else
+ decl_str --;
+ }
+ if (asmname_str[0] == '*')
+ {
+ size_t ulp_len = strlen (user_label_prefix);
+
+ asmname_str ++;
+
+ if (ulp_len == 0)
+ test = true;
+ else if (strncmp (asmname_str, user_label_prefix, ulp_len) == 0)
+ asmname_str += ulp_len, test=true;
+ else
+ asmname_str --;
+ }
+
+ if (!test)
+ return false;
+ return strcmp (decl_str, asmname_str) == 0;
+}
+
+
/* Returns nonzero if P1 and P2 are equal. */
static int
@@ -592,6 +592,12 @@ diagnose_tm_1_op (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
return NULL_TREE;
}
+static inline bool
+is_tm_safe_or_pure (const_tree x)
+{
+ return is_tm_safe (x) || is_tm_pure (x);
+}
+
static tree
diagnose_tm_1 (gimple_stmt_iterator *gsi, bool *handled_ops_p,
struct walk_stmt_info *wi)
@@ -2506,6 +2506,68 @@ operation_could_trap_p (enum tree_code op, bool fp_operation, bool honor_trapv,
&handled);
}
+
+/* Returns true if it is possible to prove that the index of
+ an array access REF (an ARRAY_REF expression) falls into the
+ array bounds. */
+
+static bool
+in_array_bounds_p (tree ref)
+{
+ tree idx = TREE_OPERAND (ref, 1);
+ tree min, max;
+
+ if (TREE_CODE (idx) != INTEGER_CST)
+ return false;
+
+ min = array_ref_low_bound (ref);
+ max = array_ref_up_bound (ref);
+ if (!min
+ || !max
+ || TREE_CODE (min) != INTEGER_CST
+ || TREE_CODE (max) != INTEGER_CST)
+ return false;
+
+ if (tree_int_cst_lt (idx, min)
+ || tree_int_cst_lt (max, idx))
+ return false;
+
+ return true;
+}
+
+/* Returns true if it is possible to prove that the range of
+ an array access REF (an ARRAY_RANGE_REF expression) falls
+ into the array bounds. */
+
+static bool
+range_in_array_bounds_p (tree ref)
+{
+ tree domain_type = TYPE_DOMAIN (TREE_TYPE (ref));
+ tree range_min, range_max, min, max;
+
+ range_min = TYPE_MIN_VALUE (domain_type);
+ range_max = TYPE_MAX_VALUE (domain_type);
+ if (!range_min
+ || !range_max
+ || TREE_CODE (range_min) != INTEGER_CST
+ || TREE_CODE (range_max) != INTEGER_CST)
+ return false;
+
+ min = array_ref_low_bound (ref);
+ max = array_ref_up_bound (ref);
+ if (!min
+ || !max
+ || TREE_CODE (min) != INTEGER_CST
+ || TREE_CODE (max) != INTEGER_CST)
+ return false;
+
+ if (tree_int_cst_lt (range_min, min)
+ || tree_int_cst_lt (max, range_max))
+ return false;
+
+ return true;
+}
+
/* Return true if EXPR can trap, as in dereferencing an invalid pointer
location or floating point arithmetic. C.f. the rtl version, may_trap_p.
This routine expects only GIMPLE lhs or rhs input. */
@@ -1181,7 +1181,7 @@ init_object_sizes (void)
/* Destroy data structures after the object size computation. */
-void
+static void
fini_object_sizes (void)
{
int object_size_type;
@@ -536,6 +536,29 @@ hashable_expr_equal_p (const struct hashable_expr *expr0,
}
}
+/* Generate a hash value for a pair of expressions. This can be used
+ iteratively by passing a previous result as the VAL argument.
+
+ The same hash value is always returned for a given pair of expressions,
+ regardless of the order in which they are presented. This is useful in
+ hashing the operands of commutative functions. */
+
+static hashval_t
+iterative_hash_exprs_commutative (const_tree t1,
+ const_tree t2, hashval_t val)
+{
+ hashval_t one = iterative_hash_expr (t1, 0);
+ hashval_t two = iterative_hash_expr (t2, 0);
+ hashval_t t;
+
+ if (one > two)
+ t = one, one = two, two = t;
+ val = iterative_hash_hashval_t (one, val);
+ val = iterative_hash_hashval_t (two, val);
+
+ return val;
+}
+
/* Compute a hash value for a hashable_expr value EXPR and a
previously accumulated hash value VAL. If two hashable_expr
values compare equal with hashable_expr_equal_p, they must
@@ -1672,6 +1672,36 @@ verify_symbolic_number_p (struct symbolic_number *n, gimple stmt)
return true;
}
+/* Return the value of the integer constant X, sign-extended. */
+
+static HOST_WIDEST_INT
+widest_int_cst_value (const_tree x)
+{
+ unsigned bits = TYPE_PRECISION (TREE_TYPE (x));
+ unsigned HOST_WIDEST_INT val = TREE_INT_CST_LOW (x);
+
+#if HOST_BITS_PER_WIDEST_INT > HOST_BITS_PER_WIDE_INT
+ gcc_assert (HOST_BITS_PER_WIDEST_INT >= HOST_BITS_PER_DOUBLE_INT);
+ val |= (((unsigned HOST_WIDEST_INT) TREE_INT_CST_HIGH (x))
+ << HOST_BITS_PER_WIDE_INT);
+#else
+ /* Make sure the sign-extended value will fit in a HOST_WIDE_INT. */
+ gcc_assert (TREE_INT_CST_HIGH (x) == 0
+ || TREE_INT_CST_HIGH (x) == -1);
+#endif
+
+ if (bits < HOST_BITS_PER_WIDEST_INT)
+ {
+ bool negative = ((val >> (bits - 1)) & 1) != 0;
+ if (negative)
+ val |= (~(unsigned HOST_WIDEST_INT) 0) << (bits - 1) << 1;
+ else
+ val &= ~((~(unsigned HOST_WIDEST_INT) 0) << (bits - 1) << 1);
+ }
+
+ return val;
+}
+
/* find_bswap_1 invokes itself recursively with N and tries to perform
the operation given by the rhs of STMT on the result. If the
operation could successfully be executed the function returns the
@@ -1172,6 +1172,22 @@ operand_less_p (tree val, tree val2)
return 0;
}
+
+/* Given the components of a unary expression CODE, TYPE and OP0,
+ attempt to fold the expression to a constant without modifying
+ TYPE or OP0.
+
+ If the expression could be simplified to a constant, then return
+ the constant. If the expression cannot be simplified to a
+ constant, then return NULL_TREE. */
+
+static tree
+fold_unary_to_constant (enum tree_code code, tree type, tree op0)
+{
+ tree tem = fold_unary (code, type, op0);
+ return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
+}
+
/* Compare two values VAL1 and VAL2. Return
-2 if VAL1 and VAL2 cannot be compared at compile-time,
@@ -1464,24 +1480,6 @@ value_range_nonnegative_p (value_range_t *vr)
return false;
}
-/* Return true if T, an SSA_NAME, is known to be nonnegative. Return
- false otherwise or if no value range information is available. */
-
-bool
-ssa_name_nonnegative_p (const_tree t)
-{
- value_range_t *vr = get_value_range (t);
-
- if (INTEGRAL_TYPE_P (t)
- && TYPE_UNSIGNED (t))
- return true;
-
- if (!vr)
- return false;
-
- return value_range_nonnegative_p (vr);
-}
-
/* If *VR has a value rante that is a single constant value return that,
otherwise return NULL_TREE. */
@@ -588,82 +588,6 @@ decl_assembler_name (tree decl)
return DECL_WITH_VIS_CHECK (decl)->decl_with_vis.assembler_name;
}
-/* Compare ASMNAME with the DECL_ASSEMBLER_NAME of DECL. */
-
-bool
-decl_assembler_name_equal (tree decl, const_tree asmname)
-{
- tree decl_asmname = DECL_ASSEMBLER_NAME (decl);
- const char *decl_str;
- const char *asmname_str;
- bool test = false;
-
- if (decl_asmname == asmname)
- return true;
-
- decl_str = IDENTIFIER_POINTER (decl_asmname);
- asmname_str = IDENTIFIER_POINTER (asmname);
-
-
- /* If the target assembler name was set by the user, things are trickier.
- We have a leading '*' to begin with. After that, it's arguable what
- is the correct thing to do with -fleading-underscore. Arguably, we've
- historically been doing the wrong thing in assemble_alias by always
- printing the leading underscore. Since we're not changing that, make
- sure user_label_prefix follows the '*' before matching. */
- if (decl_str[0] == '*')
- {
- size_t ulp_len = strlen (user_label_prefix);
-
- decl_str ++;
-
- if (ulp_len == 0)
- test = true;
- else if (strncmp (decl_str, user_label_prefix, ulp_len) == 0)
- decl_str += ulp_len, test=true;
- else
- decl_str --;
- }
- if (asmname_str[0] == '*')
- {
- size_t ulp_len = strlen (user_label_prefix);
-
- asmname_str ++;
-
- if (ulp_len == 0)
- test = true;
- else if (strncmp (asmname_str, user_label_prefix, ulp_len) == 0)
- asmname_str += ulp_len, test=true;
- else
- asmname_str --;
- }
-
- if (!test)
- return false;
- return strcmp (decl_str, asmname_str) == 0;
-}
-
-/* Hash asmnames ignoring the user specified marks. */
-
-hashval_t
-decl_assembler_name_hash (const_tree asmname)
-{
- if (IDENTIFIER_POINTER (asmname)[0] == '*')
- {
- const char *decl_str = IDENTIFIER_POINTER (asmname) + 1;
- size_t ulp_len = strlen (user_label_prefix);
-
- if (ulp_len == 0)
- ;
- else if (strncmp (decl_str, user_label_prefix, ulp_len) == 0)
- decl_str += ulp_len;
-
- return htab_hash_string (decl_str);
- }
-
- return htab_hash_string (IDENTIFIER_POINTER (asmname));
-}
-
/* Compute the number of bytes occupied by a tree with code CODE.
This function cannot be used for nodes that have variable sizes,
including TREE_VEC, STRING_CST, and CALL_EXPR. */
@@ -2386,35 +2310,6 @@ real_onep (const_tree expr)
}
}
-/* Return 1 if EXPR is the real constant two. Trailing zeroes matter
- for decimal float constants, so don't return 1 for them. */
-
-int
-real_twop (const_tree expr)
-{
- STRIP_NOPS (expr);
-
- switch (TREE_CODE (expr))
- {
- case REAL_CST:
- return REAL_VALUES_EQUAL (TREE_REAL_CST (expr), dconst2)
- && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
- case COMPLEX_CST:
- return real_twop (TREE_REALPART (expr))
- && real_zerop (TREE_IMAGPART (expr));
- case VECTOR_CST:
- {
- unsigned i;
- for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
- if (!real_twop (VECTOR_CST_ELT (expr, i)))
- return false;
- return true;
- }
- default:
- return false;
- }
-}
-
/* Return 1 if EXPR is the real constant minus one. Trailing zeroes
matter for decimal float constants, so don't return 1 for them. */
@@ -2510,21 +2405,6 @@ chain_index (int idx, tree chain)
return chain;
}
-/* Return nonzero if ELEM is part of the chain CHAIN. */
-
-int
-chain_member (const_tree elem, const_tree chain)
-{
- while (chain)
- {
- if (elem == chain)
- return 1;
- chain = DECL_CHAIN (chain);
- }
-
- return 0;
-}
-
/* Return the length of a chain of nodes chained through TREE_CHAIN.
We expect a null pointer to mark the end of the chain.
This is the Lisp primitive `length'. */
@@ -2552,21 +2432,6 @@ list_length (const_tree t)
return len;
}
-/* Returns the number of FIELD_DECLs in TYPE. */
-
-int
-fields_length (const_tree type)
-{
- tree t = TYPE_FIELDS (type);
- int count = 0;
-
- for (; t; t = DECL_CHAIN (t))
- if (TREE_CODE (t) == FIELD_DECL)
- ++count;
-
- return count;
-}
-
/* Returns the first FIELD_DECL in the TYPE_FIELDS of the RECORD_TYPE or
UNION_TYPE TYPE, or NULL_TREE if none. */
@@ -2688,23 +2553,6 @@ tree_cons_stat (tree purpose, tree value, tree chain MEM_STAT_DECL)
TREE_VALUE (node) = value;
return node;
}
-
-/* Return the values of the elements of a CONSTRUCTOR as a vector of
- trees. */
-
-vec<tree, va_gc> *
-ctor_to_vec (tree ctor)
-{
- vec<tree, va_gc> *vec;
- vec_alloc (vec, CONSTRUCTOR_NELTS (ctor));
- unsigned int ix;
- tree val;
-
- FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (ctor), ix, val)
- vec->quick_push (val);
-
- return vec;
-}
/* Return the size nominally occupied by an object of type TYPE
when it resides in memory. The value is measured in units of bytes,
@@ -2787,18 +2635,6 @@ max_int_size_in_bytes (const_tree type)
return size;
}
-
-/* Returns a tree for the size of EXP in bytes. */
-
-tree
-tree_expr_size (const_tree exp)
-{
- if (DECL_P (exp)
- && DECL_SIZE_UNIT (exp) != 0)
- return DECL_SIZE_UNIT (exp);
- else
- return size_in_bytes (TREE_TYPE (exp));
-}
/* Return the bit position of FIELD, in bits from the start of the record.
This is a tree of type bitsizetype. */
@@ -3902,6 +3738,88 @@ substitute_placeholder_in_expr (tree exp, tree obj)
return new_tree;
}
+
+/* Subroutine of stabilize_reference; this is called for subtrees of
+ references. Any expression with side-effects must be put in a SAVE_EXPR
+ to ensure that it is only evaluated once.
+
+ We don't put SAVE_EXPR nodes around everything, because assigning very
+ simple expressions to temporaries causes us to miss good opportunities
+ for optimizations. Among other things, the opportunity to fold in the
+ addition of a constant into an addressing mode often gets lost, e.g.
+ "y[i+1] += x;". In general, we take the approach that we should not make
+ an assignment unless we are forced into it - i.e., that any non-side effect
+ operator should be allowed, and that cse should take care of coalescing
+ multiple utterances of the same expression should that prove fruitful. */
+
+static tree
+stabilize_reference_1 (tree e)
+{
+ tree result;
+ enum tree_code code = TREE_CODE (e);
+
+ /* We cannot ignore const expressions because it might be a reference
+ to a const array but whose index contains side-effects. But we can
+ ignore things that are actual constant or that already have been
+ handled by this function. */
+
+ if (tree_invariant_p (e))
+ return e;
+
+ switch (TREE_CODE_CLASS (code))
+ {
+ case tcc_exceptional:
+ case tcc_type:
+ case tcc_declaration:
+ case tcc_comparison:
+ case tcc_statement:
+ case tcc_expression:
+ case tcc_reference:
+ case tcc_vl_exp:
+ /* If the expression has side-effects, then encase it in a SAVE_EXPR
+ so that it will only be evaluated once. */
+ /* The reference (r) and comparison (<) classes could be handled as
+ below, but it is generally faster to only evaluate them once. */
+ if (TREE_SIDE_EFFECTS (e))
+ return save_expr (e);
+ return e;
+
+ case tcc_constant:
+ /* Constants need no processing. In fact, we should never reach
+ here. */
+ return e;
+
+ case tcc_binary:
+ /* Division is slow and tends to be compiled with jumps,
+ especially the division by powers of 2 that is often
+ found inside of an array reference. So do it just once. */
+ if (code == TRUNC_DIV_EXPR || code == TRUNC_MOD_EXPR
+ || code == FLOOR_DIV_EXPR || code == FLOOR_MOD_EXPR
+ || code == CEIL_DIV_EXPR || code == CEIL_MOD_EXPR
+ || code == ROUND_DIV_EXPR || code == ROUND_MOD_EXPR)
+ return save_expr (e);
+ /* Recursively stabilize each operand. */
+ result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)),
+ stabilize_reference_1 (TREE_OPERAND (e, 1)));
+ break;
+
+ case tcc_unary:
+ /* Recursively stabilize each operand. */
+ result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)));
+ break;
+
+ default:
+ gcc_unreachable ();
+ }
+
+ TREE_TYPE (result) = TREE_TYPE (e);
+ TREE_READONLY (result) = TREE_READONLY (e);
+ TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (e);
+ TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (e);
+
+ return result;
+}
+
/* Stabilize a reference so that we can use it any number of times
without causing its operands to be evaluated more than once.
Returns the stabilized reference. This works by means of save_expr,
@@ -3983,87 +3901,6 @@ stabilize_reference (tree ref)
return result;
}
-
-/* Subroutine of stabilize_reference; this is called for subtrees of
- references. Any expression with side-effects must be put in a SAVE_EXPR
- to ensure that it is only evaluated once.
-
- We don't put SAVE_EXPR nodes around everything, because assigning very
- simple expressions to temporaries causes us to miss good opportunities
- for optimizations. Among other things, the opportunity to fold in the
- addition of a constant into an addressing mode often gets lost, e.g.
- "y[i+1] += x;". In general, we take the approach that we should not make
- an assignment unless we are forced into it - i.e., that any non-side effect
- operator should be allowed, and that cse should take care of coalescing
- multiple utterances of the same expression should that prove fruitful. */
-
-tree
-stabilize_reference_1 (tree e)
-{
- tree result;
- enum tree_code code = TREE_CODE (e);
-
- /* We cannot ignore const expressions because it might be a reference
- to a const array but whose index contains side-effects. But we can
- ignore things that are actual constant or that already have been
- handled by this function. */
-
- if (tree_invariant_p (e))
- return e;
-
- switch (TREE_CODE_CLASS (code))
- {
- case tcc_exceptional:
- case tcc_type:
- case tcc_declaration:
- case tcc_comparison:
- case tcc_statement:
- case tcc_expression:
- case tcc_reference:
- case tcc_vl_exp:
- /* If the expression has side-effects, then encase it in a SAVE_EXPR
- so that it will only be evaluated once. */
- /* The reference (r) and comparison (<) classes could be handled as
- below, but it is generally faster to only evaluate them once. */
- if (TREE_SIDE_EFFECTS (e))
- return save_expr (e);
- return e;
-
- case tcc_constant:
- /* Constants need no processing. In fact, we should never reach
- here. */
- return e;
-
- case tcc_binary:
- /* Division is slow and tends to be compiled with jumps,
- especially the division by powers of 2 that is often
- found inside of an array reference. So do it just once. */
- if (code == TRUNC_DIV_EXPR || code == TRUNC_MOD_EXPR
- || code == FLOOR_DIV_EXPR || code == FLOOR_MOD_EXPR
- || code == CEIL_DIV_EXPR || code == CEIL_MOD_EXPR
- || code == ROUND_DIV_EXPR || code == ROUND_MOD_EXPR)
- return save_expr (e);
- /* Recursively stabilize each operand. */
- result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)),
- stabilize_reference_1 (TREE_OPERAND (e, 1)));
- break;
-
- case tcc_unary:
- /* Recursively stabilize each operand. */
- result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)));
- break;
-
- default:
- gcc_unreachable ();
- }
-
- TREE_TYPE (result) = TREE_TYPE (e);
- TREE_READONLY (result) = TREE_READONLY (e);
- TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (e);
- TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (e);
-
- return result;
-}
/* Low-level constructors for expressions. */
@@ -4758,43 +4595,22 @@ omp_declare_simd_clauses_equal (tree clauses1, tree clauses2)
return true;
}
-/* Remove duplicate "omp declare simd" attributes. */
+/* Compare two constructor-element-type constants.  Return true if the lists
+   are known to be equal; otherwise return false.  */
-void
-omp_remove_redundant_declare_simd_attrs (tree fndecl)
+static bool
+simple_cst_list_equal (const_tree l1, const_tree l2)
{
- tree attr, end_attr = NULL_TREE, last_attr = NULL_TREE;
- for (attr = lookup_attribute ("omp declare simd", DECL_ATTRIBUTES (fndecl));
- attr;
- attr = lookup_attribute ("omp declare simd", TREE_CHAIN (attr)))
+ while (l1 != NULL_TREE && l2 != NULL_TREE)
{
- tree *pc;
- for (pc = &TREE_CHAIN (attr); *pc && *pc != end_attr; )
- {
- if (is_attribute_p ("omp declare simd", TREE_PURPOSE (*pc)))
- {
- last_attr = TREE_CHAIN (*pc);
- if (TREE_VALUE (attr) == NULL_TREE)
- {
- if (TREE_VALUE (*pc) == NULL_TREE)
- {
- *pc = TREE_CHAIN (*pc);
- continue;
- }
- }
- else if (TREE_VALUE (*pc) != NULL_TREE
- && omp_declare_simd_clauses_equal
- (TREE_VALUE (TREE_VALUE (*pc)),
- TREE_VALUE (TREE_VALUE (attr))))
- {
- *pc = TREE_CHAIN (*pc);
- continue;
- }
- }
- pc = &TREE_CHAIN (*pc);
- }
- end_attr = last_attr;
+ if (simple_cst_equal (TREE_VALUE (l1), TREE_VALUE (l2)) != 1)
+ return false;
+
+ l1 = TREE_CHAIN (l1);
+ l2 = TREE_CHAIN (l2);
}
+
+ return l1 == l2;
}
/* Compare two attributes for their value identity. Return true if the
@@ -7073,17 +6889,6 @@ tree_low_cst (const_tree t, int pos)
return TREE_INT_CST_LOW (t);
}
-/* Return the HOST_WIDE_INT least significant bits of T, a sizetype
- kind INTEGER_CST. This makes sure to properly sign-extend the
- constant. */
-
-HOST_WIDE_INT
-size_low_cst (const_tree t)
-{
- double_int d = tree_to_double_int (t);
- return d.sext (TYPE_PRECISION (TREE_TYPE (t))).low;
-}
-
/* Return the most significant (sign) bit of T. */
int
@@ -7145,24 +6950,6 @@ tree_int_cst_min_precision (tree value, bool unsignedp)
return tree_floor_log2 (value) + 1 + !unsignedp;
}
-/* Compare two constructor-element-type constants. Return 1 if the lists
- are known to be equal; otherwise return 0. */
-
-int
-simple_cst_list_equal (const_tree l1, const_tree l2)
-{
- while (l1 != NULL_TREE && l2 != NULL_TREE)
- {
- if (simple_cst_equal (TREE_VALUE (l1), TREE_VALUE (l2)) != 1)
- return 0;
-
- l1 = TREE_CHAIN (l1);
- l2 = TREE_CHAIN (l2);
- }
-
- return l1 == l2;
-}
-
/* Return truthvalue of whether T1 is the same tree structure as T2.
Return 1 if they are the same.
Return 0 if they are understandably different.
@@ -7588,29 +7375,6 @@ iterative_hash_expr (const_tree t, hashval_t val)
}
}
-/* Generate a hash value for a pair of expressions. This can be used
- iteratively by passing a previous result as the VAL argument.
-
- The same hash value is always returned for a given pair of expressions,
- regardless of the order in which they are presented. This is useful in
- hashing the operands of commutative functions. */
-
-hashval_t
-iterative_hash_exprs_commutative (const_tree t1,
- const_tree t2, hashval_t val)
-{
- hashval_t one = iterative_hash_expr (t1, 0);
- hashval_t two = iterative_hash_expr (t2, 0);
- hashval_t t;
-
- if (one > two)
- t = one, one = two, two = t;
- val = iterative_hash_hashval_t (one, val);
- val = iterative_hash_hashval_t (two, val);
-
- return val;
-}
-
/* Constructors for pointer, array and function types.
(RECORD_TYPE, UNION_TYPE and ENUMERAL_TYPE nodes are
constructed by language-dependent code, not here.) */
@@ -7752,30 +7516,6 @@ build_reference_type (tree to_type)
return build_reference_type_for_mode (to_type, pointer_mode, false);
}
-/* Build a type that is compatible with t but has no cv quals anywhere
- in its type, thus
-
- const char *const *const * -> char ***. */
-
-tree
-build_type_no_quals (tree t)
-{
- switch (TREE_CODE (t))
- {
- case POINTER_TYPE:
- return build_pointer_type_for_mode (build_type_no_quals (TREE_TYPE (t)),
- TYPE_MODE (t),
- TYPE_REF_CAN_ALIAS_ALL (t));
- case REFERENCE_TYPE:
- return
- build_reference_type_for_mode (build_type_no_quals (TREE_TYPE (t)),
- TYPE_MODE (t),
- TYPE_REF_CAN_ALIAS_ALL (t));
- default:
- return TYPE_MAIN_VARIANT (t);
- }
-}
-
#define MAX_INT_CACHED_PREC \
(HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64)
static GTY(()) tree nonstandard_integer_type_cache[2 * MAX_INT_CACHED_PREC + 2];
@@ -8151,111 +7891,6 @@ build_function_type (tree value_type, tree arg_types)
return t;
}
-/* Build variant of function type ORIG_TYPE skipping ARGS_TO_SKIP and the
- return value if SKIP_RETURN is true. */
-
-static tree
-build_function_type_skip_args (tree orig_type, bitmap args_to_skip,
- bool skip_return)
-{
- tree new_type = NULL;
- tree args, new_args = NULL, t;
- tree new_reversed;
- int i = 0;
-
- for (args = TYPE_ARG_TYPES (orig_type); args && args != void_list_node;
- args = TREE_CHAIN (args), i++)
- if (!args_to_skip || !bitmap_bit_p (args_to_skip, i))
- new_args = tree_cons (NULL_TREE, TREE_VALUE (args), new_args);
-
- new_reversed = nreverse (new_args);
- if (args)
- {
- if (new_reversed)
- TREE_CHAIN (new_args) = void_list_node;
- else
- new_reversed = void_list_node;
- }
-
- /* Use copy_node to preserve as much as possible from original type
- (debug info, attribute lists etc.)
- Exception is METHOD_TYPEs must have THIS argument.
- When we are asked to remove it, we need to build new FUNCTION_TYPE
- instead. */
- if (TREE_CODE (orig_type) != METHOD_TYPE
- || !args_to_skip
- || !bitmap_bit_p (args_to_skip, 0))
- {
- new_type = build_distinct_type_copy (orig_type);
- TYPE_ARG_TYPES (new_type) = new_reversed;
- }
- else
- {
- new_type
- = build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type),
- new_reversed));
- TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type);
- }
-
- if (skip_return)
- TREE_TYPE (new_type) = void_type_node;
-
- /* This is a new type, not a copy of an old type. Need to reassociate
- variants. We can handle everything except the main variant lazily. */
- t = TYPE_MAIN_VARIANT (orig_type);
- if (t != orig_type)
- {
- t = build_function_type_skip_args (t, args_to_skip, skip_return);
- TYPE_MAIN_VARIANT (new_type) = t;
- TYPE_NEXT_VARIANT (new_type) = TYPE_NEXT_VARIANT (t);
- TYPE_NEXT_VARIANT (t) = new_type;
- }
- else
- {
- TYPE_MAIN_VARIANT (new_type) = new_type;
- TYPE_NEXT_VARIANT (new_type) = NULL;
- }
-
- return new_type;
-}
-
-/* Build variant of function decl ORIG_DECL skipping ARGS_TO_SKIP and the
- return value if SKIP_RETURN is true.
-
- Arguments from DECL_ARGUMENTS list can't be removed now, since they are
- linked by TREE_CHAIN directly. The caller is responsible for eliminating
- them when they are being duplicated (i.e. copy_arguments_for_versioning). */
-
-tree
-build_function_decl_skip_args (tree orig_decl, bitmap args_to_skip,
- bool skip_return)
-{
- tree new_decl = copy_node (orig_decl);
- tree new_type;
-
- new_type = TREE_TYPE (orig_decl);
- if (prototype_p (new_type)
- || (skip_return && !VOID_TYPE_P (TREE_TYPE (new_type))))
- new_type
- = build_function_type_skip_args (new_type, args_to_skip, skip_return);
- TREE_TYPE (new_decl) = new_type;
-
- /* For declarations setting DECL_VINDEX (i.e. methods)
- we expect first argument to be THIS pointer. */
- if (args_to_skip && bitmap_bit_p (args_to_skip, 0))
- DECL_VINDEX (new_decl) = NULL_TREE;
-
- /* When signature changes, we need to clear builtin info. */
- if (DECL_BUILT_IN (new_decl)
- && args_to_skip
- && !bitmap_empty_p (args_to_skip))
- {
- DECL_BUILT_IN_CLASS (new_decl) = NOT_BUILT_IN;
- DECL_FUNCTION_CODE (new_decl) = (enum built_in_function) 0;
- }
- return new_decl;
-}
-
/* Build a function type. The RETURN_TYPE is the type returned by the
function. If VAARGS is set, no void_type_node is appended to the
the list. ARGP must be always be terminated be a NULL_TREE. */
@@ -8419,21 +8054,6 @@ build_method_type_directly (tree basetype,
return t;
}
-/* Construct, lay out and return the type of methods belonging to class
- BASETYPE and whose arguments and values are described by TYPE.
- If that type exists already, reuse it.
- TYPE must be a FUNCTION_TYPE node. */
-
-tree
-build_method_type (tree basetype, tree type)
-{
- gcc_assert (TREE_CODE (type) == FUNCTION_TYPE);
-
- return build_method_type_directly (basetype,
- TREE_TYPE (type),
- TYPE_ARG_TYPES (type));
-}
-
/* Construct, lay out and return the type of offsets to a value
of type TYPE, within an object of type BASETYPE.
If a suitable offset type exists already, reuse it. */
@@ -10693,68 +10313,6 @@ build_call_vec (tree return_type, tree fn, vec<tree, va_gc> *args)
return ret;
}
-
-/* Returns true if it is possible to prove that the index of
- an array access REF (an ARRAY_REF expression) falls into the
- array bounds. */
-
-bool
-in_array_bounds_p (tree ref)
-{
- tree idx = TREE_OPERAND (ref, 1);
- tree min, max;
-
- if (TREE_CODE (idx) != INTEGER_CST)
- return false;
-
- min = array_ref_low_bound (ref);
- max = array_ref_up_bound (ref);
- if (!min
- || !max
- || TREE_CODE (min) != INTEGER_CST
- || TREE_CODE (max) != INTEGER_CST)
- return false;
-
- if (tree_int_cst_lt (idx, min)
- || tree_int_cst_lt (max, idx))
- return false;
-
- return true;
-}
-
-/* Returns true if it is possible to prove that the range of
- an array access REF (an ARRAY_RANGE_REF expression) falls
- into the array bounds. */
-
-bool
-range_in_array_bounds_p (tree ref)
-{
- tree domain_type = TYPE_DOMAIN (TREE_TYPE (ref));
- tree range_min, range_max, min, max;
-
- range_min = TYPE_MIN_VALUE (domain_type);
- range_max = TYPE_MAX_VALUE (domain_type);
- if (!range_min
- || !range_max
- || TREE_CODE (range_min) != INTEGER_CST
- || TREE_CODE (range_max) != INTEGER_CST)
- return false;
-
- min = array_ref_low_bound (ref);
- max = array_ref_up_bound (ref);
- if (!min
- || !max
- || TREE_CODE (min) != INTEGER_CST
- || TREE_CODE (max) != INTEGER_CST)
- return false;
-
- if (tree_int_cst_lt (range_min, min)
- || tree_int_cst_lt (max, range_max))
- return false;
-
- return true;
-}
-
/* Return true if T (assumed to be a DECL) must be assigned a memory
location. */
@@ -10792,36 +10350,6 @@ int_cst_value (const_tree x)
return val;
}
-/* Return value of a constant X and sign-extend it. */
-
-HOST_WIDEST_INT
-widest_int_cst_value (const_tree x)
-{
- unsigned bits = TYPE_PRECISION (TREE_TYPE (x));
- unsigned HOST_WIDEST_INT val = TREE_INT_CST_LOW (x);
-
-#if HOST_BITS_PER_WIDEST_INT > HOST_BITS_PER_WIDE_INT
- gcc_assert (HOST_BITS_PER_WIDEST_INT >= HOST_BITS_PER_DOUBLE_INT);
- val |= (((unsigned HOST_WIDEST_INT) TREE_INT_CST_HIGH (x))
- << HOST_BITS_PER_WIDE_INT);
-#else
- /* Make sure the sign-extended value will fit in a HOST_WIDE_INT. */
- gcc_assert (TREE_INT_CST_HIGH (x) == 0
- || TREE_INT_CST_HIGH (x) == -1);
-#endif
-
- if (bits < HOST_BITS_PER_WIDEST_INT)
- {
- bool negative = ((val >> (bits - 1)) & 1) != 0;
- if (negative)
- val |= (~(unsigned HOST_WIDEST_INT) 0) << (bits - 1) << 1;
- else
- val &= ~((~(unsigned HOST_WIDEST_INT) 0) << (bits - 1) << 1);
- }
-
- return val;
-}
-
/* If TYPE is an integral or pointer type, return an integer type with
the same precision which is unsigned iff UNSIGNEDP is true, or itself
if TYPE is already an integer type of signedness UNSIGNEDP. */
@@ -10870,22 +10398,6 @@ signed_type_for (tree type)
return signed_or_unsigned_type_for (0, type);
}
-/* If TYPE is a vector type, return a signed integer vector type with the
- same width and number of subparts. Otherwise return boolean_type_node. */
-
-tree
-truth_type_for (tree type)
-{
- if (TREE_CODE (type) == VECTOR_TYPE)
- {
- tree elem = lang_hooks.types.type_for_size
- (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))), 0);
- return build_opaque_vector_type (elem, TYPE_VECTOR_SUBPARTS (type));
- }
- else
- return boolean_type_node;
-}
-
/* Returns the largest value obtainable by casting something in INNER type to
OUTER type. */
@@ -11822,17 +11334,6 @@ block_ultimate_origin (const_tree block)
}
}
-/* Return true if T1 and T2 are equivalent lists. */
-
-bool
-list_equal_p (const_tree t1, const_tree t2)
-{
- for (; t1 && t2; t1 = TREE_CHAIN (t1) , t2 = TREE_CHAIN (t2))
- if (TREE_VALUE (t1) != TREE_VALUE (t2))
- return false;
- return !t1 && !t2;
-}
-
/* Return true iff conversion in EXP generates no instruction. Mark
it inline so that we fully inline into the stripping functions even
though we have two uses of this function. */
@@ -3367,8 +3367,6 @@ tree_operand_check_code (const_tree __t, enum tree_code __code, int __i,
|| ((NODE) && TREE_TYPE ((NODE)) == error_mark_node))
extern tree decl_assembler_name (tree);
-extern bool decl_assembler_name_equal (tree decl, const_tree asmname);
-extern hashval_t decl_assembler_name_hash (const_tree asmname);
/* Compute the number of bytes occupied by 'node'. This routine only
looks at TREE_CODE and, if the code is TREE_VEC, TREE_VEC_LENGTH. */
@@ -3591,7 +3589,6 @@ extern tree make_unsigned_type (int);
extern tree signed_or_unsigned_type_for (int, tree);
extern tree signed_type_for (tree);
extern tree unsigned_type_for (tree);
-extern tree truth_type_for (tree);
extern void initialize_sizetypes (void);
extern void fixup_unsigned_type (tree);
extern tree build_pointer_type_for_mode (tree, enum machine_mode, bool);
@@ -3601,14 +3598,12 @@ extern tree build_reference_type (tree);
extern tree build_vector_type_for_mode (tree, enum machine_mode);
extern tree build_vector_type (tree innertype, int nunits);
extern tree build_opaque_vector_type (tree innertype, int nunits);
-extern tree build_type_no_quals (tree);
extern tree build_index_type (tree);
extern tree build_array_type (tree, tree);
extern tree build_nonshared_array_type (tree, tree);
extern tree build_array_type_nelts (tree, unsigned HOST_WIDE_INT);
extern tree build_function_type (tree, tree);
extern tree build_function_type_list (tree, ...);
-extern tree build_function_decl_skip_args (tree, bitmap, bool);
extern tree build_varargs_function_type_list (tree, ...);
extern tree build_function_type_array (tree, int, tree *);
extern tree build_varargs_function_type_array (tree, int, tree *);
@@ -3618,12 +3613,9 @@ extern tree build_varargs_function_type_array (tree, int, tree *);
build_varargs_function_type_array (RET, vec_safe_length (V), \
vec_safe_address (V))
extern tree build_method_type_directly (tree, tree, tree);
-extern tree build_method_type (tree, tree);
extern tree build_offset_type (tree, tree);
extern tree build_complex_type (tree);
extern tree array_type_nelts (const_tree);
-extern bool in_array_bounds_p (tree);
-extern bool range_in_array_bounds_p (tree);
extern tree value_member (tree, tree);
extern tree purpose_member (const_tree, tree);
@@ -3649,7 +3641,6 @@ tree_low_cst (const_tree t, int pos)
return TREE_INT_CST_LOW (t);
}
#endif
-extern HOST_WIDE_INT size_low_cst (const_tree);
extern int tree_int_cst_sgn (const_tree);
extern int tree_int_cst_sign_bit (const_tree);
extern unsigned int tree_int_cst_min_precision (tree, bool);
@@ -3708,9 +3699,6 @@ extern tree build_type_attribute_variant (tree, tree);
extern tree build_decl_attribute_variant (tree, tree);
extern tree build_type_attribute_qual_variant (tree, tree, int);
-/* Remove redundant "omp declare simd" attributes from fndecl. */
-extern void omp_remove_redundant_declare_simd_attrs (tree);
-
/* Return 0 if the attributes for two types are incompatible, 1 if they
are compatible, and 2 if they are nearly compatible (which causes a
warning to be generated). */
@@ -3884,7 +3872,6 @@ extern tree expr_last (tree);
extern tree size_in_bytes (const_tree);
extern HOST_WIDE_INT int_size_in_bytes (const_tree);
extern HOST_WIDE_INT max_int_size_in_bytes (const_tree);
-extern tree tree_expr_size (const_tree);
extern tree bit_position (const_tree);
extern HOST_WIDE_INT int_bit_position (const_tree);
extern tree byte_position (const_tree);
@@ -3945,10 +3932,6 @@ extern tree nreverse (tree);
extern int list_length (const_tree);
-/* Returns the number of FIELD_DECLs in a type. */
-
-extern int fields_length (const_tree);
-
/* Returns the first FIELD_DECL in a type. */
extern tree first_field (const_tree);
@@ -3963,10 +3946,6 @@ extern bool initializer_zerop (const_tree);
extern tree uniform_vector_p (const_tree);
-/* Given a CONSTRUCTOR CTOR, return the element values as a vector. */
-
-extern vec<tree, va_gc> *ctor_to_vec (tree);
-
extern bool categorize_ctor_elements (const_tree, HOST_WIDE_INT *,
HOST_WIDE_INT *, bool *);
@@ -4109,12 +4088,6 @@ extern tree variable_size (tree);
extern tree stabilize_reference (tree);
-/* Subroutine of stabilize_reference; this is called for subtrees of
- references. Any expression with side-effects must be put in a SAVE_EXPR
- to ensure that it is only evaluated once. */
-
-extern tree stabilize_reference_1 (tree);
-
/* Return EXP, stripped of any conversions to wider types
in such a way that the result of converting to type FOR_TYPE
is the same as if EXP were converted to FOR_TYPE.
@@ -4297,11 +4270,6 @@ extern tree unshare_expr_without_location (tree);
/* In stmt.c */
extern void expand_label (tree);
-extern void expand_goto (tree);
-
-extern rtx expand_stack_save (void);
-extern void expand_stack_restore (tree);
-extern void expand_return (tree);
/* Compare and hash for any structure which begins with a canonical
pointer. Assumes all pointers are interchangeable, which is sort
@@ -4371,7 +4339,6 @@ extern tree fold_build3_stat_loc (location_t, enum tree_code, tree, tree, tree,
tree MEM_STAT_DECL);
extern tree fold_build1_initializer_loc (location_t, enum tree_code, tree, tree);
extern tree fold_build2_initializer_loc (location_t, enum tree_code, tree, tree, tree);
-extern tree fold_build3_initializer_loc (location_t, enum tree_code, tree, tree, tree, tree);
#define fold_build_call_array(T1,T2,N,T4)\
fold_build_call_array_loc (UNKNOWN_LOCATION, T1, T2, N, T4)
extern tree fold_build_call_array_loc (location_t, tree, tree, int, tree *);
@@ -4402,7 +4369,6 @@ extern tree omit_two_operands_loc (location_t, tree, tree, tree, tree);
#define invert_truthvalue(T)\
invert_truthvalue_loc (UNKNOWN_LOCATION, T)
extern tree invert_truthvalue_loc (location_t, tree);
-extern tree fold_unary_to_constant (enum tree_code, tree, tree);
extern tree fold_binary_to_constant (enum tree_code, tree, tree, tree);
extern tree fold_read_from_constant_string (tree);
extern tree int_const_binop (enum tree_code, const_tree, const_tree);
@@ -4434,7 +4400,6 @@ extern enum tree_code swap_tree_comparison (enum tree_code);
extern bool ptr_difference_const (tree, tree, HOST_WIDE_INT *);
extern enum tree_code invert_tree_comparison (enum tree_code, bool);
-extern bool tree_expr_nonzero_p (tree);
extern bool tree_unary_nonzero_warnv_p (enum tree_code, tree, tree, bool *);
extern bool tree_binary_nonzero_warnv_p (enum tree_code, tree, tree, tree op1,
bool *);
@@ -4443,11 +4408,8 @@ extern bool tree_unary_nonnegative_warnv_p (enum tree_code, tree, tree, bool *);
extern bool tree_binary_nonnegative_warnv_p (enum tree_code, tree, tree, tree,
bool *);
extern bool tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p);
-extern bool tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p);
extern bool tree_call_nonnegative_warnv_p (tree, tree, tree, tree, bool *);
-extern bool tree_expr_nonzero_warnv_p (tree, bool *);
-
extern bool fold_real_zero_addition_p (const_tree, const_tree, int);
extern tree combine_comparisons (location_t, enum tree_code, enum tree_code,
enum tree_code, tree, tree, tree);
@@ -4518,7 +4480,6 @@ extern tree fold_builtin_stxcpy_chk (location_t, tree, tree, tree, tree, tree, b
enum built_in_function);
extern tree fold_builtin_stxncpy_chk (location_t, tree, tree, tree, tree, tree, bool,
enum built_in_function);
-extern tree fold_builtin_snprintf_chk (location_t, tree, tree, enum built_in_function);
extern bool fold_builtin_next_arg (tree, bool);
extern enum built_in_function builtin_mathfn_code (const_tree);
extern tree fold_builtin_call_array (location_t, tree, tree, int, tree *);
@@ -4529,7 +4490,6 @@ extern tree build_call_expr (tree, int, ...);
extern tree mathfn_built_in (tree, enum built_in_function fn);
extern tree c_strlen (tree, int);
extern tree build_string_literal (int, const char *);
-extern bool validate_arglist (const_tree, ...);
extern rtx builtin_memset_read_str (void *, HOST_WIDE_INT, enum machine_mode);
extern bool is_builtin_fn (tree);
extern bool get_object_alignment_1 (tree, unsigned int *,
@@ -4567,22 +4527,17 @@ extern int tree_floor_log2 (const_tree);
extern unsigned int tree_ctz (const_tree);
extern int simple_cst_equal (const_tree, const_tree);
extern hashval_t iterative_hash_expr (const_tree, hashval_t);
-extern hashval_t iterative_hash_exprs_commutative (const_tree,
- const_tree, hashval_t);
extern hashval_t iterative_hash_host_wide_int (HOST_WIDE_INT, hashval_t);
extern hashval_t iterative_hash_hashval_t (hashval_t, hashval_t);
extern hashval_t iterative_hash_host_wide_int (HOST_WIDE_INT, hashval_t);
extern int compare_tree_int (const_tree, unsigned HOST_WIDE_INT);
extern int type_list_equal (const_tree, const_tree);
-extern int chain_member (const_tree, const_tree);
-extern int simple_cst_list_equal (const_tree, const_tree);
extern void dump_tree_statistics (void);
extern void recompute_tree_invariant_for_addr_expr (tree);
extern bool needs_to_live_in_memory (const_tree);
extern tree reconstruct_complex_type (tree, tree);
extern int real_onep (const_tree);
-extern int real_twop (const_tree);
extern int real_minus_onep (const_tree);
extern void init_ttree (void);
extern void build_common_tree_nodes (bool, bool);
@@ -4592,7 +4547,6 @@ extern tree build_range_type (tree, tree, tree);
extern tree build_nonshared_range_type (tree, tree, tree);
extern bool subrange_type_for_debug_p (const_tree, tree *, tree *);
extern HOST_WIDE_INT int_cst_value (const_tree);
-extern HOST_WIDEST_INT widest_int_cst_value (const_tree);
extern tree tree_block (tree);
extern void tree_set_block (tree, tree);
@@ -4617,10 +4571,8 @@ extern const char *get_tree_code_name (enum tree_code);
extern tree build_addr (tree, tree);
/* In function.c */
-extern void expand_main_function (void);
extern void expand_function_end (void);
extern void expand_function_start (tree);
-extern void stack_protect_prologue (void);
extern void stack_protect_epilogue (void);
extern void init_dummy_function_start (void);
extern void expand_dummy_function_end (void);
@@ -4664,7 +4616,6 @@ extern void debug_raw (vec<tree, va_gc> *ptr);
#ifdef BUFSIZ
extern void dump_addr (FILE*, const char *, const void *);
extern void print_node (FILE *, const char *, tree, int);
-extern void print_vec_tree (FILE *, const char *, vec<tree, va_gc> *, int);
extern void print_node_brief (FILE *, const char *, const_tree, int);
extern void indent_to (FILE *, int);
#endif
@@ -4689,9 +4640,6 @@ extern bool must_pass_in_stack_var_size_or_pad (enum machine_mode, const_tree);
/* In attribs.c. */
extern const struct attribute_spec *lookup_attribute_spec (const_tree);
-extern const struct attribute_spec *lookup_scoped_attribute_spec (const_tree,
- const_tree);
-
extern void init_attributes (void);
/* Process the attributes listed in ATTRIBUTES and install them in *NODE,
@@ -4708,8 +4656,6 @@ extern bool cxx11_attribute_p (const_tree);
extern tree get_attribute_name (const_tree);
-extern tree get_attribute_namespace (const_tree);
-
extern void apply_tm_attr (tree, tree);
/* In stor-layout.c */
@@ -4723,7 +4669,6 @@ extern tree tree_output_constant_def (tree);
extern void make_decl_rtl (tree);
extern rtx make_decl_rtl_for_debug (tree);
extern void make_decl_one_only (tree, tree);
-extern int supports_one_only (void);
extern void resolve_unique_section (tree, int, int);
extern void mark_referenced (tree);
extern void mark_decl_referenced (tree);
@@ -4765,12 +4710,10 @@ extern bool initializer_constant_valid_for_bitfield_p (tree);
extern bool constructor_static_from_elts_p (const_tree);
/* In stmt.c */
-extern void expand_computed_goto (tree);
extern bool parse_output_constraint (const char **, int, int, int,
bool *, bool *, bool *);
extern bool parse_input_constraint (const char **, int, int, int, int,
const char * const *, bool *, bool *);
-extern void expand_asm_stmt (gimple);
extern tree resolve_asm_operand_names (tree, tree, tree, tree);
#ifdef HARD_CONST
/* Silly ifdef to avoid having all includers depend on hard-reg-set.h. */
@@ -4803,7 +4746,6 @@ extern void mark_addressable (tree);
extern int tree_map_base_eq (const void *, const void *);
extern unsigned int tree_map_base_hash (const void *);
extern int tree_map_base_marked_p (const void *);
-extern bool list_equal_p (const_tree, const_tree);
#define tree_map_eq tree_map_base_eq
extern unsigned int tree_map_hash (const void *);
@@ -4825,12 +4767,8 @@ extern unsigned int tree_decl_map_hash (const void *);
#define tree_vec_map_hash tree_decl_map_hash
#define tree_vec_map_marked_p tree_map_base_marked_p
-/* In tree-vrp.c */
-extern bool ssa_name_nonnegative_p (const_tree);
-
/* In tree-object-size.c. */
extern void init_object_sizes (void);
-extern void fini_object_sizes (void);
extern unsigned HOST_WIDE_INT compute_builtin_object_size (tree, int);
/* In expr.c. */
@@ -4840,9 +4778,6 @@ extern unsigned HOST_WIDE_INT compute_builtin_object_size (tree, int);
succeed. */
extern int can_move_by_pieces (unsigned HOST_WIDE_INT, unsigned int);
-/* Is it an ADDR_EXPR of a DECL that's not in memory? */
-extern bool addr_expr_of_non_mem_decl_p (tree);
-
extern unsigned HOST_WIDE_INT highest_pow2_factor (const_tree);
extern tree build_personality_function (const char *);
@@ -4855,12 +4790,6 @@ extern bool is_tm_ending_fndecl (tree);
extern void record_tm_replacement (tree, tree);
extern void tm_malloc_replacement (tree);
-static inline bool
-is_tm_safe_or_pure (const_tree x)
-{
- return is_tm_safe (x) || is_tm_pure (x);
-}
-
/* In tree-inline.c. */
void init_inline_once (void);
@@ -4933,12 +4862,6 @@ more_call_expr_args_p (const call_expr_arg_iterator *iter)
return (iter->i < iter->n);
}
-static inline bool
-more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
-{
- return (iter->i < iter->n);
-}
-
/* Iterate through each argument ARG of CALL_EXPR CALL, using variable ITER
(of type call_expr_arg_iterator) to hold the iteration state. */
#define FOR_EACH_CALL_EXPR_ARG(arg, iter, call) \
@@ -4956,9 +4879,6 @@ is_lang_specific (tree t)
return TREE_CODE (t) == LANG_TYPE || TREE_CODE (t) >= NUM_TREE_CODES;
}
-/* In vtable-verify.c. */
-extern void save_vtable_map_decl (tree);
-
/* Valid builtin number. */
#define BUILTIN_VALID_P(FNCODE) \
(IN_RANGE ((int)FNCODE, ((int)BUILT_IN_NONE) + 1, ((int) END_BUILTINS) - 1))
@@ -5889,19 +5889,6 @@ maybe_assemble_visibility (tree decl)
return 0;
}
-/* Returns 1 if the target configuration supports defining public symbols
- so that one of them will be chosen at link time instead of generating a
- multiply-defined symbol error, whether through the use of weak symbols or
- a target-specific mechanism for having duplicates discarded. */
-
-int
-supports_one_only (void)
-{
- if (SUPPORTS_ONE_ONLY)
- return 1;
- return TARGET_SUPPORTS_WEAK;
-}
-
/* Set up DECL as a public symbol that can be defined in multiple
translation units without generating a linker error. */