===================================================================
@@ -18,22 +18,22 @@
;; along with GCC; see the file COPYING3. If not see
;; <http://www.gnu.org/licenses/>.
-;; Return nonzero if OP is either a i387 or SSE fp register.
+;; Return true if OP is either an i387 or SSE fp register.
(define_predicate "any_fp_register_operand"
(and (match_code "reg")
(match_test "ANY_FP_REGNO_P (REGNO (op))")))
-;; Return nonzero if OP is an i387 fp register.
+;; Return true if OP is an i387 fp register.
(define_predicate "fp_register_operand"
(and (match_code "reg")
(match_test "FP_REGNO_P (REGNO (op))")))
-;; Return nonzero if OP is a non-fp register_operand.
+;; Return true if OP is a non-fp register_operand.
(define_predicate "register_and_not_any_fp_reg_operand"
(and (match_code "reg")
(not (match_test "ANY_FP_REGNO_P (REGNO (op))"))))
-;; Return nonzero if OP is a register operand other than an i387 fp register.
+;; Return true if OP is a register operand other than an i387 fp register.
(define_predicate "register_and_not_fp_reg_operand"
(and (match_code "reg")
(not (match_test "FP_REGNO_P (REGNO (op))"))))
@@ -63,18 +63,18 @@
{
if ((!TARGET_64BIT || GET_MODE (op) != DImode)
&& GET_MODE (op) != SImode && GET_MODE (op) != HImode)
- return 0;
+ return false;
if (GET_CODE (op) == SUBREG)
op = SUBREG_REG (op);
/* Be careful to accept only registers having upper parts. */
- return REGNO (op) > LAST_VIRTUAL_REGISTER || REGNO (op) < 4;
+ return REGNO (op) > LAST_VIRTUAL_REGISTER || REGNO (op) <= BX_REG;
})
;; Return true if op is the AX register.
(define_predicate "ax_reg_operand"
(and (match_code "reg")
- (match_test "REGNO (op) == 0")))
+ (match_test "REGNO (op) == AX_REG")))
;; Return true if op is the flags register.
(define_predicate "flags_reg_operand"
@@ -98,16 +98,14 @@
;; Return true if op is not xmm0 register.
(define_predicate "reg_not_xmm0_operand"
(and (match_operand 0 "register_operand")
- (match_test "!REG_P (op)
- || REGNO (op) != FIRST_SSE_REG")))
+ (match_test "REGNO (op) != FIRST_SSE_REG")))
;; As above, but allow nonimmediate operands.
(define_predicate "nonimm_not_xmm0_operand"
- (and (match_operand 0 "nonimmediate_operand")
- (match_test "!REG_P (op)
- || REGNO (op) != FIRST_SSE_REG")))
+ (ior (match_operand 0 "memory_operand")
+ (match_operand 0 "reg_not_xmm0_operand")))
-;; Return 1 if VALUE can be stored in a sign extended immediate field.
+;; Return true if VALUE can be stored in a sign extended immediate field.
(define_predicate "x86_64_immediate_operand"
(match_code "const_int,symbol_ref,label_ref,const")
{
@@ -121,7 +119,7 @@
to be at least 32 and this all acceptable constants are
represented as CONST_INT. */
if (HOST_BITS_PER_WIDE_INT == 32)
- return 1;
+ return true;
else
{
HOST_WIDE_INT val = trunc_int_for_mode (INTVAL (op), DImode);
@@ -155,7 +153,7 @@
case UNSPEC_DTPOFF:
case UNSPEC_GOTNTPOFF:
case UNSPEC_NTPOFF:
- return 1;
+ return true;
default:
break;
}
@@ -167,16 +165,16 @@
HOST_WIDE_INT offset;
if (ix86_cmodel == CM_LARGE)
- return 0;
+ return false;
if (!CONST_INT_P (op2))
- return 0;
+ return false;
offset = trunc_int_for_mode (INTVAL (op2), DImode);
switch (GET_CODE (op1))
{
case SYMBOL_REF:
/* TLS symbols are not constant. */
if (SYMBOL_REF_TLS_MODEL (op1))
- return 0;
+ return false;
/* For CM_SMALL assume that latest object is 16MB before
end of 31bits boundary. We may also accept pretty
large negative constants knowing that all objects are
@@ -186,7 +184,7 @@
&& !SYMBOL_REF_FAR_ADDR_P (op1)))
&& offset < 16*1024*1024
&& trunc_int_for_mode (offset, SImode) == offset)
- return 1;
+ return true;
/* For CM_KERNEL we know that all object resist in the
negative half of 32bits address space. We may not
accept negative offsets, since they may be just off
@@ -194,7 +192,7 @@
if (ix86_cmodel == CM_KERNEL
&& offset > 0
&& trunc_int_for_mode (offset, SImode) == offset)
- return 1;
+ return true;
break;
case LABEL_REF:
@@ -203,11 +201,11 @@
if ((ix86_cmodel == CM_SMALL || ix86_cmodel == CM_MEDIUM)
&& offset < 16*1024*1024
&& trunc_int_for_mode (offset, SImode) == offset)
- return 1;
+ return true;
if (ix86_cmodel == CM_KERNEL
&& offset > 0
&& trunc_int_for_mode (offset, SImode) == offset)
- return 1;
+ return true;
break;
case UNSPEC:
@@ -217,7 +215,7 @@
case UNSPEC_NTPOFF:
if (offset > 0
&& trunc_int_for_mode (offset, SImode) == offset)
- return 1;
+ return true;
}
break;
@@ -231,10 +229,10 @@
gcc_unreachable ();
}
- return 0;
+ return false;
})
-;; Return 1 if VALUE can be stored in the zero extended immediate field.
+;; Return true if VALUE can be stored in the zero extended immediate field.
(define_predicate "x86_64_zext_immediate_operand"
(match_code "const_double,const_int,symbol_ref,label_ref,const")
{
@@ -244,7 +242,7 @@
if (HOST_BITS_PER_WIDE_INT == 32)
return (GET_MODE (op) == VOIDmode && !CONST_DOUBLE_HIGH (op));
else
- return 0;
+ return false;
case CONST_INT:
if (HOST_BITS_PER_WIDE_INT == 32)
@@ -274,13 +272,13 @@
rtx op2 = XEXP (XEXP (op, 0), 1);
if (ix86_cmodel == CM_LARGE)
- return 0;
+ return false;
switch (GET_CODE (op1))
{
case SYMBOL_REF:
/* TLS symbols are not constant. */
if (SYMBOL_REF_TLS_MODEL (op1))
- return 0;
+ return false;
/* For small code model we may accept pretty large positive
offsets, since one bit is available for free. Negative
offsets are limited by the size of NULL pointer area
@@ -291,7 +289,7 @@
&& CONST_INT_P (op2)
&& trunc_int_for_mode (INTVAL (op2), DImode) > -0x10000
&& trunc_int_for_mode (INTVAL (op2), SImode) == INTVAL (op2))
- return 1;
+ return true;
/* ??? For the kernel, we may accept adjustment of
-0x10000000, since we know that it will just convert
negative address space to positive, but perhaps this
@@ -305,11 +303,11 @@
&& CONST_INT_P (op2)
&& trunc_int_for_mode (INTVAL (op2), DImode) > -0x10000
&& trunc_int_for_mode (INTVAL (op2), SImode) == INTVAL (op2))
- return 1;
+ return true;
break;
default:
- return 0;
+ return false;
}
}
break;
@@ -317,17 +315,17 @@
default:
gcc_unreachable ();
}
- return 0;
+ return false;
})
-;; Return nonzero if OP is general operand representable on x86_64.
+;; Return true if OP is general operand representable on x86_64.
(define_predicate "x86_64_general_operand"
(if_then_else (match_test "TARGET_64BIT")
(ior (match_operand 0 "nonimmediate_operand")
(match_operand 0 "x86_64_immediate_operand"))
(match_operand 0 "general_operand")))
-;; Return nonzero if OP is general operand representable on x86_64
+;; Return true if OP is general operand representable on x86_64
;; as either sign extended or zero extended constant.
(define_predicate "x86_64_szext_general_operand"
(if_then_else (match_test "TARGET_64BIT")
@@ -336,14 +334,14 @@
(match_operand 0 "x86_64_zext_immediate_operand"))
(match_operand 0 "general_operand")))
-;; Return nonzero if OP is nonmemory operand representable on x86_64.
+;; Return true if OP is nonmemory operand representable on x86_64.
(define_predicate "x86_64_nonmemory_operand"
(if_then_else (match_test "TARGET_64BIT")
(ior (match_operand 0 "register_operand")
(match_operand 0 "x86_64_immediate_operand"))
(match_operand 0 "nonmemory_operand")))
-;; Return nonzero if OP is nonmemory operand representable on x86_64.
+;; Return true if OP is nonmemory operand representable on x86_64.
(define_predicate "x86_64_szext_nonmemory_operand"
(if_then_else (match_test "TARGET_64BIT")
(ior (match_operand 0 "register_operand")
@@ -357,7 +355,7 @@
(match_code "const,symbol_ref,label_ref")
{
if (!flag_pic)
- return 0;
+ return false;
/* Rule out relocations that translate into 64bit constants. */
if (TARGET_64BIT && GET_CODE (op) == CONST)
{
@@ -367,13 +365,13 @@
if (GET_CODE (op) == UNSPEC
&& (XINT (op, 1) == UNSPEC_GOTOFF
|| XINT (op, 1) == UNSPEC_GOT))
- return 0;
+ return false;
}
return symbolic_operand (op, mode);
})
-;; Return nonzero if OP is nonmemory operand acceptable by movabs patterns.
+;; Return true if OP is nonmemory operand acceptable by movabs patterns.
(define_predicate "x86_64_movabs_operand"
(if_then_else (match_test "!TARGET_64BIT || !flag_pic")
(match_operand 0 "nonmemory_operand")
@@ -381,7 +379,7 @@
(and (match_operand 0 "const_double_operand")
(match_test "GET_MODE_SIZE (mode) <= 8")))))
-;; Returns nonzero if OP is either a symbol reference or a sum of a symbol
+;; Return true if OP is either a symbol reference or a sum of a symbol
;; reference and a constant.
(define_predicate "symbolic_operand"
(match_code "symbol_ref,label_ref,const")
@@ -390,7 +388,7 @@
{
case SYMBOL_REF:
case LABEL_REF:
- return 1;
+ return true;
case CONST:
op = XEXP (op, 0);
@@ -400,25 +398,25 @@
&& (XINT (op, 1) == UNSPEC_GOT
|| XINT (op, 1) == UNSPEC_GOTOFF
|| XINT (op, 1) == UNSPEC_GOTPCREL)))
- return 1;
+ return true;
if (GET_CODE (op) != PLUS
|| !CONST_INT_P (XEXP (op, 1)))
- return 0;
+ return false;
op = XEXP (op, 0);
if (GET_CODE (op) == SYMBOL_REF
|| GET_CODE (op) == LABEL_REF)
- return 1;
+ return true;
/* Only @GOTOFF gets offsets. */
if (GET_CODE (op) != UNSPEC
|| XINT (op, 1) != UNSPEC_GOTOFF)
- return 0;
+ return false;
op = XVECEXP (op, 0, 0);
if (GET_CODE (op) == SYMBOL_REF
|| GET_CODE (op) == LABEL_REF)
- return 1;
- return 0;
+ return true;
+ return false;
default:
gcc_unreachable ();
@@ -435,16 +433,16 @@
op = XEXP (XEXP (op, 0), 0);
if (GET_CODE (op) == LABEL_REF)
- return 1;
+ return true;
if (GET_CODE (op) != SYMBOL_REF)
- return 0;
+ return false;
- if (SYMBOL_REF_TLS_MODEL (op) != 0)
- return 0;
+ if (SYMBOL_REF_TLS_MODEL (op))
+ return false;
if (SYMBOL_REF_LOCAL_P (op))
- return 1;
+ return true;
/* There is, however, a not insubstantial body of code in the rest of
the compiler that assumes it can just stick the results of
@@ -453,9 +451,9 @@
always create a DECL an invoke targetm.encode_section_info. */
if (strncmp (XSTR (op, 0), internal_label_prefix,
internal_label_prefix_len) == 0)
- return 1;
+ return true;
- return 0;
+ return false;
})
;; Test for a legitimate @GOTOFF operand.
@@ -473,7 +471,7 @@
;; Test for various thread-local symbols.
(define_predicate "tls_symbolic_operand"
(and (match_code "symbol_ref")
- (match_test "SYMBOL_REF_TLS_MODEL (op) != 0")))
+ (match_test "SYMBOL_REF_TLS_MODEL (op)")))
(define_predicate "tls_modbase_operand"
(and (match_code "symbol_ref")
@@ -519,7 +517,7 @@
op = SUBREG_REG (op);
if (!TARGET_64BIT && op == stack_pointer_rtx)
- return 0;
+ return false;
return register_no_elim_operand (op, mode);
})
@@ -635,13 +633,13 @@
return val <= 255*8 && val % 8 == 0;
})
-;; Return nonzero if OP is CONST_INT >= 1 and <= 31 (a valid operand
+;; Return true if OP is CONST_INT >= 1 and <= 31 (a valid operand
;; for shift & compare patterns, as shifting by 0 does not change flags).
(define_predicate "const_1_to_31_operand"
(and (match_code "const_int")
(match_test "IN_RANGE (INTVAL (op), 1, 31)")))
-;; Return nonzero if OP is CONST_INT >= 1 and <= 63 (a valid operand
+;; Return true if OP is CONST_INT >= 1 and <= 63 (a valid operand
;; for 64bit shift & compare patterns, as shifting by 0 does not change flags).
(define_predicate "const_1_to_63_operand"
(and (match_code "const_int")
@@ -713,7 +711,7 @@
/* On Pentium4, the inc and dec operations causes extra dependency on flag
registers, since carry flag is not set. */
if (!TARGET_USE_INCDEC && !optimize_insn_for_size_p ())
- return 0;
+ return false;
return op == const1_rtx || op == constm1_rtx;
})
@@ -743,7 +741,7 @@
op = maybe_get_pool_constant (op);
if (!(op && GET_CODE (op) == CONST_VECTOR))
- return 0;
+ return false;
n_elts = CONST_VECTOR_NUNITS (op);
@@ -751,9 +749,9 @@
{
rtx elt = CONST_VECTOR_ELT (op, n_elts);
if (elt != CONST0_RTX (GET_MODE_INNER (GET_MODE (op))))
- return 0;
+ return false;
}
- return 1;
+ return true;
})
/* Return true if operand is a vector constant that is all ones. */
@@ -770,28 +768,28 @@
{
rtx x = CONST_VECTOR_ELT (op, i);
if (x != constm1_rtx)
- return 0;
+ return false;
}
- return 1;
+ return true;
}
- return 0;
+ return false;
})
-; Return 1 when OP is operand acceptable for standard SSE move.
-;; Return true when OP is operand acceptable for standard SSE move.
(define_predicate "vector_move_operand"
(ior (match_operand 0 "nonimmediate_operand")
(match_operand 0 "const0_operand")))
-;; Return 1 when OP is nonimmediate or standard SSE constant.
+;; Return true when OP is nonimmediate or standard SSE constant.
(define_predicate "nonimmediate_or_sse_const_operand"
(match_operand 0 "general_operand")
{
if (nonimmediate_operand (op, mode))
- return 1;
+ return true;
if (standard_sse_constant_p (op) > 0)
- return 1;
- return 0;
+ return true;
+ return false;
})
;; Return true if OP is a register or a zero.
@@ -812,7 +810,7 @@
return parts.seg == SEG_DEFAULT;
})
-;; Return nonzero if the rtx is known to be at least 32 bits aligned.
+;; Return true if the rtx is known to be at least 32 bits aligned.
(define_predicate "aligned_operand"
(match_operand 0 "general_operand")
{
@@ -821,26 +819,26 @@
/* Registers and immediate operands are always "aligned". */
if (!MEM_P (op))
- return 1;
+ return true;
/* All patterns using aligned_operand on memory operands ends up
in promoting memory operand to 64bit and thus causing memory mismatch. */
if (TARGET_MEMORY_MISMATCH_STALL && !optimize_insn_for_size_p ())
- return 0;
+ return false;
/* Don't even try to do any aligned optimizations with volatiles. */
if (MEM_VOLATILE_P (op))
- return 0;
+ return false;
if (MEM_ALIGN (op) >= 32)
- return 1;
+ return true;
op = XEXP (op, 0);
/* Pushes and pops are only valid on the stack pointer. */
if (GET_CODE (op) == PRE_DEC
|| GET_CODE (op) == POST_INC)
- return 1;
+ return true;
/* Decode the address. */
ok = ix86_decompose_address (op, &parts);
@@ -850,25 +848,25 @@
if (parts.index)
{
if (REGNO_POINTER_ALIGN (REGNO (parts.index)) * parts.scale < 32)
- return 0;
+ return false;
}
if (parts.base)
{
if (REGNO_POINTER_ALIGN (REGNO (parts.base)) < 32)
- return 0;
+ return false;
}
if (parts.disp)
{
if (!CONST_INT_P (parts.disp)
- || (INTVAL (parts.disp) & 3) != 0)
- return 0;
+ || (INTVAL (parts.disp) & 3))
+ return false;
}
/* Didn't find one -- this must be an aligned address. */
- return 1;
+ return true;
})
-;; Returns 1 if OP is memory operand with a displacement.
+;; Return true if OP is memory operand with a displacement.
(define_predicate "memory_displacement_operand"
(match_operand 0 "memory_operand")
{
@@ -880,7 +878,7 @@
return parts.disp != NULL_RTX;
})
-;; Returns 1 if OP is memory operand with a displacement only.
+;; Return true if OP is memory operand with a displacement only.
(define_predicate "memory_displacement_only_operand"
(match_operand 0 "memory_operand")
{
@@ -888,18 +886,18 @@
int ok;
if (TARGET_64BIT)
- return 0;
+ return false;
ok = ix86_decompose_address (XEXP (op, 0), &parts);
gcc_assert (ok);
if (parts.base || parts.index)
- return 0;
+ return false;
return parts.disp != NULL_RTX;
})
-;; Returns 1 if OP is memory operand which will need zero or
+;; Return true if OP is memory operand which will need zero or
;; one register at most, not counting stack pointer or frame pointer.
(define_predicate "cmpxchg8b_pic_memory_operand"
(match_operand 0 "memory_operand")
@@ -914,26 +912,26 @@
|| parts.base == frame_pointer_rtx
|| parts.base == hard_frame_pointer_rtx
|| parts.base == stack_pointer_rtx)
- return 1;
+ return true;
if (parts.index == NULL_RTX
|| parts.index == arg_pointer_rtx
|| parts.index == frame_pointer_rtx
|| parts.index == hard_frame_pointer_rtx
|| parts.index == stack_pointer_rtx)
- return 1;
+ return true;
- return 0;
+ return false;
})
-;; Returns 1 if OP is memory operand that cannot be represented
+;; Return true if OP is memory operand that cannot be represented
;; by the modRM array.
(define_predicate "long_memory_operand"
(and (match_operand 0 "memory_operand")
- (match_test "memory_address_length (op) != 0")))
+ (match_test "memory_address_length (op)")))
-;; Return 1 if OP is a comparison operator that can be issued by fcmov.
+;; Return true if OP is a comparison operator that can be issued by fcmov.
(define_predicate "fcmov_comparison_operator"
(match_operand 0 "comparison_operator")
{
@@ -943,7 +941,7 @@
if (inmode == CCFPmode || inmode == CCFPUmode)
{
if (!ix86_trivial_fp_comparison_operator (op, mode))
- return 0;
+ return false;
code = ix86_fp_compare_code_to_integer (code);
}
/* i387 supports just limited amount of conditional codes. */
@@ -952,17 +950,17 @@
case LTU: case GTU: case LEU: case GEU:
if (inmode == CCmode || inmode == CCFPmode || inmode == CCFPUmode
|| inmode == CCCmode)
- return 1;
- return 0;
+ return true;
+ return false;
case ORDERED: case UNORDERED:
case EQ: case NE:
- return 1;
+ return true;
default:
- return 0;
+ return false;
}
})
-;; Return 1 if OP is a comparison that can be used in the CMPSS/CMPPS insns.
+;; Return true if OP is a comparison that can be used in the CMPSS/CMPPS insns.
;; The first set are supported directly; the second set can't be done with
;; full IEEE support, i.e. NaNs.
;;
@@ -974,7 +972,7 @@
(define_special_predicate "sse_comparison_operator"
(match_code "eq,lt,le,unordered,ne,unge,ungt,ordered"))
-;; Return 1 if OP is a comparison operator that can be issued by
+;; Return true if OP is a comparison operator that can be issued by
;; avx predicate generation instructions
(define_predicate "avx_comparison_float_operator"
(match_code "ne,eq,ge,gt,le,lt,unordered,ordered,uneq,unge,ungt,unle,unlt,ltgt"))
@@ -988,7 +986,7 @@
(define_predicate "bt_comparison_operator"
(match_code "ne,eq"))
-;; Return 1 if OP is a valid comparison operator in valid mode.
+;; Return true if OP is a valid comparison operator in valid mode.
(define_predicate "ix86_comparison_operator"
(match_operand 0 "comparison_operator")
{
@@ -1001,30 +999,31 @@
switch (code)
{
case EQ: case NE:
- return 1;
+ return true;
case LT: case GE:
if (inmode == CCmode || inmode == CCGCmode
|| inmode == CCGOCmode || inmode == CCNOmode)
- return 1;
- return 0;
+ return true;
+ return false;
case LTU: case GTU: case LEU: case GEU:
if (inmode == CCmode || inmode == CCCmode)
- return 1;
- return 0;
+ return true;
+ return false;
case ORDERED: case UNORDERED:
if (inmode == CCmode)
- return 1;
- return 0;
+ return true;
+ return false;
case GT: case LE:
if (inmode == CCmode || inmode == CCGCmode || inmode == CCNOmode)
- return 1;
- return 0;
+ return true;
+ return false;
default:
- return 0;
+ return false;
}
})
-;; Return 1 if OP is a valid comparison operator testing carry flag to be set.
+;; Return true if OP is a valid comparison operator
+;; testing carry flag to be set.
(define_predicate "ix86_carry_flag_operator"
(match_code "ltu,lt,unlt,gtu,gt,ungt,le,unle,ge,unge,ltgt,uneq")
{
@@ -1034,22 +1033,22 @@
if (inmode == CCFPmode || inmode == CCFPUmode)
{
if (!ix86_trivial_fp_comparison_operator (op, mode))
- return 0;
+ return false;
code = ix86_fp_compare_code_to_integer (code);
}
else if (inmode == CCCmode)
return code == LTU || code == GTU;
else if (inmode != CCmode)
- return 0;
+ return false;
return code == LTU;
})
-;; Return 1 if this comparison only requires testing one flag bit.
+;; Return true if this comparison only requires testing one flag bit.
(define_predicate "ix86_trivial_fp_comparison_operator"
(match_code "gt,ge,unlt,unle,uneq,ltgt,ordered,unordered"))
-;; Return 1 if we know how to do this comparison. Others require
+;; Return true if we know how to do this comparison. Others require
;; testing more than one flag bit, and we let the generic middle-end
;; code do that.
(define_predicate "ix86_fp_comparison_operator"
@@ -1063,7 +1062,7 @@
(match_operand 0 "comparison_operator")
{
enum rtx_code code = GET_CODE (op);
- int ret;
+ bool ret;
PUT_CODE (op, swap_condition (code));
ret = ix86_fp_comparison_operator (op, mode);
@@ -1103,7 +1102,7 @@
(define_predicate "commutative_operator"
(match_code "plus,mult,and,ior,xor,smin,smax,umin,umax"))
-;; Return 1 if OP is a binary operator that can be promoted to wider mode.
+;; Return true if OP is a binary operator that can be promoted to wider mode.
(define_predicate "promotable_binary_operator"
(ior (match_code "plus,and,ior,xor,ashift")
(and (match_code "mult")
@@ -1115,19 +1114,19 @@
(define_predicate "absneg_operator"
(match_code "abs,neg"))
-;; Return 1 if OP is misaligned memory operand
+;; Return true if OP is misaligned memory operand
(define_predicate "misaligned_operand"
(and (match_code "mem")
(match_test "MEM_ALIGN (op) < GET_MODE_ALIGNMENT (mode)")))
-;; Return 1 if OP is a emms operation, known to be a PARALLEL.
+;; Return true if OP is an emms operation, known to be a PARALLEL.
(define_predicate "emms_operation"
(match_code "parallel")
{
unsigned i;
if (XVECLEN (op, 0) != 17)
- return 0;
+ return false;
for (i = 0; i < 8; i++)
{
@@ -1137,7 +1136,7 @@
|| GET_CODE (SET_DEST (elt)) != REG
|| GET_MODE (SET_DEST (elt)) != XFmode
|| REGNO (SET_DEST (elt)) != FIRST_STACK_REG + i)
- return 0;
+ return false;
elt = XVECEXP (op, 0, i+9);
@@ -1145,19 +1144,19 @@
|| GET_CODE (SET_DEST (elt)) != REG
|| GET_MODE (SET_DEST (elt)) != DImode
|| REGNO (SET_DEST (elt)) != FIRST_MMX_REG + i)
- return 0;
+ return false;
}
- return 1;
+ return true;
})
-;; Return 1 if OP is a vzeroall operation, known to be a PARALLEL.
+;; Return true if OP is a vzeroall operation, known to be a PARALLEL.
(define_predicate "vzeroall_operation"
(match_code "parallel")
{
unsigned i, nregs = TARGET_64BIT ? 16 : 8;
if ((unsigned) XVECLEN (op, 0) != 1 + nregs)
- return 0;
+ return false;
for (i = 0; i < nregs; i++)
{
@@ -1168,19 +1167,19 @@
|| GET_MODE (SET_DEST (elt)) != V8SImode
|| REGNO (SET_DEST (elt)) != SSE_REGNO (i)
|| SET_SRC (elt) != CONST0_RTX (V8SImode))
- return 0;
+ return false;
}
- return 1;
+ return true;
})
-;; Return 1 if OP is a vzeroupper operation, known to be a PARALLEL.
+;; Return true if OP is a vzeroupper operation, known to be a PARALLEL.
(define_predicate "vzeroupper_operation"
(match_code "parallel")
{
unsigned i, nregs = TARGET_64BIT ? 16 : 8;
if ((unsigned) XVECLEN (op, 0) != 1 + nregs)
- return 0;
+ return false;
for (i = 0; i < nregs; i++)
{
@@ -1190,12 +1189,12 @@
|| GET_CODE (SET_DEST (elt)) != REG
|| GET_MODE (SET_DEST (elt)) != V8SImode
|| REGNO (SET_DEST (elt)) != SSE_REGNO (i))
- return 0;
+ return false;
}
- return 1;
+ return true;
})
-;; Return 1 if OP is a parallel for a vpermilp[ds] permute.
+;; Return true if OP is a parallel for a vpermilp[ds] permute.
;; ??? It would be much easier if the PARALLEL for a VEC_SELECT
;; had a mode, but it doesn't. So we have 4 copies and install
;; the mode by hand.
@@ -1216,7 +1215,7 @@
(and (match_code "parallel")
(match_test "avx_vpermilp_parallel (op, V2DFmode)")))
-;; Return 1 if OP is a parallel for a vperm2f128 permute.
+;; Return true if OP is a parallel for a vperm2f128 permute.
(define_predicate "avx_vperm2f128_v8sf_operand"
(and (match_code "parallel")
@@ -1230,7 +1229,7 @@
(and (match_code "parallel")
(match_test "avx_vperm2f128_parallel (op, V4DFmode)")))
-;; Return 1 if OP is a parallel for a vbroadcast permute.
+;; Return true if OP is a parallel for a vbroadcast permute.
(define_predicate "avx_vbroadcast_operand"
(and (match_code "parallel")
===================================================================
@@ -25,7 +25,7 @@ extern void optimization_options (int, i
extern void ix86_conditional_register_usage (void);
extern bool ix86_target_stack_probe (void);
-extern int ix86_can_use_return_insn_p (void);
+extern bool ix86_can_use_return_insn_p (void);
extern void ix86_setup_frame_addresses (void);
extern HOST_WIDE_INT ix86_initial_elimination_offset (int, int);
@@ -44,7 +44,7 @@ extern const char *standard_80387_consta
extern rtx standard_80387_constant_rtx (int);
extern int standard_sse_constant_p (rtx);
extern const char *standard_sse_constant_opcode (rtx, rtx);
-extern int symbolic_reference_mentioned_p (rtx);
+extern bool symbolic_reference_mentioned_p (rtx);
extern bool extended_reg_mentioned_p (rtx);
extern bool x86_extended_QIreg_mentioned_p (rtx);
extern bool x86_extended_reg_mentioned_p (rtx);
@@ -54,14 +54,14 @@ extern enum machine_mode ix86_cc_mode (e
extern int avx_vpermilp_parallel (rtx par, enum machine_mode mode);
extern int avx_vperm2f128_parallel (rtx par, enum machine_mode mode);
-extern int ix86_expand_movmem (rtx, rtx, rtx, rtx, rtx, rtx);
-extern int ix86_expand_setmem (rtx, rtx, rtx, rtx, rtx, rtx);
-extern int ix86_expand_strlen (rtx, rtx, rtx, rtx);
+extern bool ix86_expand_movmem (rtx, rtx, rtx, rtx, rtx, rtx);
+extern bool ix86_expand_setmem (rtx, rtx, rtx, rtx, rtx, rtx);
+extern bool ix86_expand_strlen (rtx, rtx, rtx, rtx);
extern bool legitimate_constant_p (rtx);
extern bool constant_address_p (rtx);
extern bool legitimate_pic_operand_p (rtx);
-extern int legitimate_pic_address_disp_p (rtx);
+extern bool legitimate_pic_address_disp_p (rtx);
extern void print_reg (rtx, int, FILE*);
extern void ix86_print_operand (FILE *, rtx, int);
@@ -88,7 +88,7 @@ extern void ix86_fixup_binary_operands_n
enum machine_mode, rtx[]);
extern void ix86_expand_binary_operator (enum rtx_code,
enum machine_mode, rtx[]);
-extern int ix86_binary_operator_ok (enum rtx_code, enum machine_mode, rtx[]);
+extern bool ix86_binary_operator_ok (enum rtx_code, enum machine_mode, rtx[]);
extern bool ix86_lea_for_add_ok (rtx, rtx[]);
extern bool ix86_vec_interleave_v2df_operator_ok (rtx operands[3], bool high);
extern bool ix86_dep_by_shift_count (const_rtx set_insn, const_rtx use_insn);
@@ -109,18 +109,17 @@ extern void ix86_expand_fp_absneg_operat
extern void ix86_expand_copysign (rtx []);
extern void ix86_split_copysign_const (rtx []);
extern void ix86_split_copysign_var (rtx []);
-extern int ix86_unary_operator_ok (enum rtx_code, enum machine_mode, rtx[]);
-extern int ix86_match_ccmode (rtx, enum machine_mode);
-extern int ix86_use_fcomi_compare (enum rtx_code);
+extern bool ix86_unary_operator_ok (enum rtx_code, enum machine_mode, rtx[]);
+extern bool ix86_match_ccmode (rtx, enum machine_mode);
extern void ix86_expand_branch (enum rtx_code, rtx, rtx, rtx);
extern void ix86_expand_setcc (rtx, enum rtx_code, rtx, rtx);
-extern int ix86_expand_int_movcc (rtx[]);
-extern int ix86_expand_fp_movcc (rtx[]);
+extern bool ix86_expand_int_movcc (rtx[]);
+extern bool ix86_expand_fp_movcc (rtx[]);
extern bool ix86_expand_fp_vcond (rtx[]);
extern bool ix86_expand_int_vcond (rtx[]);
extern void ix86_expand_sse_unpack (rtx[], bool, bool);
extern void ix86_expand_sse4_unpack (rtx[], bool, bool);
-extern int ix86_expand_int_addcc (rtx[]);
+extern bool ix86_expand_int_addcc (rtx[]);
extern void ix86_expand_call (rtx, rtx, rtx, rtx, rtx, int);
extern void x86_initialize_trampoline (rtx, rtx, rtx);
extern rtx ix86_zero_extend_to_Pmode (rtx);
@@ -129,7 +128,7 @@ extern void ix86_split_ashl (rtx *, rtx,
extern void ix86_split_ashr (rtx *, rtx, enum machine_mode);
extern void ix86_split_lshr (rtx *, rtx, enum machine_mode);
extern rtx ix86_find_base_term (rtx);
-extern int ix86_check_movabs (rtx, int);
+extern bool ix86_check_movabs (rtx, int);
extern rtx assign_386_stack_local (enum machine_mode, enum ix86_stack_slot);
extern int ix86_attr_length_immediate_default (rtx, int);
@@ -152,8 +151,8 @@ extern void ix86_split_fp_branch (enum r
rtx, rtx, rtx, rtx);
extern bool ix86_hard_regno_mode_ok (int, enum machine_mode);
extern bool ix86_modes_tieable_p (enum machine_mode, enum machine_mode);
-extern int ix86_secondary_memory_needed (enum reg_class, enum reg_class,
- enum machine_mode, int);
+extern bool ix86_secondary_memory_needed (enum reg_class, enum reg_class,
+ enum machine_mode, int);
extern bool ix86_cannot_change_mode_class (enum machine_mode,
enum machine_mode, enum reg_class);
extern enum reg_class ix86_preferred_reload_class (rtx, enum reg_class);
===================================================================
@@ -6812,59 +6812,59 @@ ix86_libcall_value (enum machine_mode mo
/* Return true iff type is returned in memory. */
-static int ATTRIBUTE_UNUSED
+static bool ATTRIBUTE_UNUSED
return_in_memory_32 (const_tree type, enum machine_mode mode)
{
HOST_WIDE_INT size;
if (mode == BLKmode)
- return 1;
+ return true;
size = int_size_in_bytes (type);
if (MS_AGGREGATE_RETURN && AGGREGATE_TYPE_P (type) && size <= 8)
- return 0;
+ return false;
if (VECTOR_MODE_P (mode) || mode == TImode)
{
/* User-created vectors small enough to fit in EAX. */
if (size < 8)
- return 0;
+ return false;
/* MMX/3dNow values are returned in MM0,
except when it doesn't exits. */
if (size == 8)
- return (TARGET_MMX ? 0 : 1);
+ return !TARGET_MMX;
/* SSE values are returned in XMM0, except when it doesn't exist. */
if (size == 16)
- return (TARGET_SSE ? 0 : 1);
+ return !TARGET_SSE;
/* AVX values are returned in YMM0, except when it doesn't exist. */
if (size == 32)
- return TARGET_AVX ? 0 : 1;
+ return !TARGET_AVX;
}
if (mode == XFmode)
- return 0;
+ return false;
if (size > 12)
- return 1;
+ return true;
/* OImode shouldn't be used directly. */
gcc_assert (mode != OImode);
- return 0;
+ return false;
}
-static int ATTRIBUTE_UNUSED
+static bool ATTRIBUTE_UNUSED
return_in_memory_64 (const_tree type, enum machine_mode mode)
{
int needed_intregs, needed_sseregs;
return !examine_argument (mode, type, 1, &needed_intregs, &needed_sseregs);
}
-static int ATTRIBUTE_UNUSED
+static bool ATTRIBUTE_UNUSED
return_in_memory_ms_64 (const_tree type, enum machine_mode mode)
{
HOST_WIDE_INT size = int_size_in_bytes (type);
@@ -6872,10 +6872,10 @@ return_in_memory_ms_64 (const_tree type,
/* __m128 is returned in xmm0. */
if ((SCALAR_INT_MODE_P (mode) || VECTOR_MODE_P (mode))
&& !COMPLEX_MODE_P (mode) && (GET_MODE_SIZE (mode) == 16 || size == 16))
- return 0;
+ return false;
/* Otherwise, the size must be exactly in [1248]. */
- return (size != 1 && size != 2 && size != 4 && size != 8);
+ return size != 1 && size != 2 && size != 4 && size != 8;
}
static bool
@@ -7604,10 +7604,10 @@ ix86_gimplify_va_arg (tree valist, tree
return build_va_arg_indirect_ref (addr);
}
-/* Return nonzero if OPNUM's MEM should be matched
+/* Return true if OPNUM's MEM should be matched
in movabs* patterns. */
-int
+bool
ix86_check_movabs (rtx insn, int opnum)
{
rtx set, mem;
@@ -7620,7 +7620,7 @@ ix86_check_movabs (rtx insn, int opnum)
while (GET_CODE (mem) == SUBREG)
mem = SUBREG_REG (mem);
gcc_assert (MEM_P (mem));
- return (volatile_ok || !MEM_VOLATILE_P (mem));
+ return volatile_ok || !MEM_VOLATILE_P (mem);
}
/* Initialize the table of extra 80387 mathematical constants. */
@@ -7649,8 +7649,8 @@ init_ext_80387_constants (void)
ext_80387_constants_init = 1;
}
-/* Return true if the constant is something that can be loaded with
- a special instruction. */
+/* Return non-zero if the constant is something that
+ can be loaded with a special instruction. */
int
standard_80387_constant_p (rtx x)
@@ -7826,16 +7826,16 @@ standard_sse_constant_opcode (rtx insn,
gcc_unreachable ();
}
-/* Returns 1 if OP contains a symbol reference */
+/* Return true if OP contains a symbol reference.  */
-int
+bool
symbolic_reference_mentioned_p (rtx op)
{
const char *fmt;
int i;
if (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF)
- return 1;
+ return true;
fmt = GET_RTX_FORMAT (GET_CODE (op));
for (i = GET_RTX_LENGTH (GET_CODE (op)) - 1; i >= 0; i--)
@@ -7846,23 +7846,23 @@ symbolic_reference_mentioned_p (rtx op)
for (j = XVECLEN (op, i) - 1; j >= 0; j--)
if (symbolic_reference_mentioned_p (XVECEXP (op, i, j)))
- return 1;
+ return true;
}
else if (fmt[i] == 'e' && symbolic_reference_mentioned_p (XEXP (op, i)))
- return 1;
+ return true;
}
- return 0;
+ return false;
}
-/* Return 1 if it is appropriate to emit `ret' instructions in the
+/* Return true if it is appropriate to emit `ret' instructions in the
body of a function. Do this only if the epilogue is simple, needing a
couple of insns. Prior to reloading, we can't tell how many registers
- must be saved, so return 0 then. Return 0 if there is no frame
+ must be saved, so return false then. Return false if there is no frame
marker to de-allocate. */
-int
+bool
ix86_can_use_return_insn_p (void)
{
struct ix86_frame frame;
@@ -10684,7 +10684,7 @@ legitimate_pic_operand_p (rtx x)
/* Determine if a given CONST RTX is a valid memory displacement
in PIC mode. */
-int
+bool
legitimate_pic_address_disp_p (rtx disp)
{
bool saw_plus;
@@ -10729,7 +10729,7 @@ legitimate_pic_address_disp_p (rtx disp)
}
}
if (GET_CODE (disp) != CONST)
- return 0;
+ return false;
disp = XEXP (disp, 0);
if (TARGET_64BIT)
@@ -10740,28 +10740,28 @@ legitimate_pic_address_disp_p (rtx disp)
|| (XINT (disp, 1) != UNSPEC_GOTPCREL
&& XINT (disp, 1) != UNSPEC_GOTOFF
&& XINT (disp, 1) != UNSPEC_PLTOFF))
- return 0;
+ return false;
if (GET_CODE (XVECEXP (disp, 0, 0)) != SYMBOL_REF
&& GET_CODE (XVECEXP (disp, 0, 0)) != LABEL_REF)
- return 0;
- return 1;
+ return false;
+ return true;
}
saw_plus = false;
if (GET_CODE (disp) == PLUS)
{
if (!CONST_INT_P (XEXP (disp, 1)))
- return 0;
+ return false;
disp = XEXP (disp, 0);
saw_plus = true;
}
if (TARGET_MACHO && darwin_local_data_pic (disp))
- return 1;
+ return true;
if (GET_CODE (disp) != UNSPEC)
- return 0;
+ return false;
switch (XINT (disp, 1))
{
@@ -10800,7 +10800,7 @@ legitimate_pic_address_disp_p (rtx disp)
&& SYMBOL_REF_TLS_MODEL (disp) == TLS_MODEL_LOCAL_DYNAMIC);
}
- return 0;
+ return false;
}
/* Recognizes RTL expressions that are valid memory addresses for an
@@ -11663,7 +11663,7 @@ ix86_legitimize_address (rtx x, rtx oldx
}
}
- if (changed && ix86_legitimate_address_p (mode, x, FALSE))
+ if (changed && ix86_legitimate_address_p (mode, x, false))
return x;
if (GET_CODE (XEXP (x, 0)) == MULT)
@@ -11689,7 +11689,7 @@ ix86_legitimize_address (rtx x, rtx oldx
x = legitimize_pic_address (x, 0);
}
- if (changed && ix86_legitimate_address_p (mode, x, FALSE))
+ if (changed && ix86_legitimate_address_p (mode, x, false))
return x;
if (REG_P (XEXP (x, 0)))
@@ -14562,7 +14562,7 @@ ix86_expand_binary_operator (enum rtx_co
/* Return TRUE or FALSE depending on whether the binary operator meets the
appropriate constraints. */
-int
+bool
ix86_binary_operator_ok (enum rtx_code code, enum machine_mode mode,
rtx operands[3])
{
@@ -14572,7 +14572,7 @@ ix86_binary_operator_ok (enum rtx_code c
/* Both source operands cannot be in memory. */
if (MEM_P (src1) && MEM_P (src2))
- return 0;
+ return false;
/* Canonicalize operand order for commutative operators. */
if (ix86_swap_binary_operands_p (code, mode, operands))
@@ -14584,17 +14584,17 @@ ix86_binary_operator_ok (enum rtx_code c
/* If the destination is memory, we must have a matching source operand. */
if (MEM_P (dst) && !rtx_equal_p (dst, src1))
- return 0;
+ return false;
/* Source 1 cannot be a constant. */
if (CONSTANT_P (src1))
- return 0;
+ return false;
/* Source 1 cannot be a non-matching memory. */
if (MEM_P (src1) && !rtx_equal_p (dst, src1))
- return 0;
+ return false;
- return 1;
+ return true;
}
/* Attempt to expand a unary operator. Make the expansion closer to the
@@ -14957,7 +14957,7 @@ ix86_dep_by_shift_count (const_rtx set_i
/* Return TRUE or FALSE depending on whether the unary operator meets the
appropriate constraints. */
-int
+bool
ix86_unary_operator_ok (enum rtx_code code ATTRIBUTE_UNUSED,
enum machine_mode mode ATTRIBUTE_UNUSED,
rtx operands[2] ATTRIBUTE_UNUSED)
@@ -14966,8 +14966,8 @@ ix86_unary_operator_ok (enum rtx_code co
if ((MEM_P (operands[0])
|| MEM_P (operands[1]))
&& ! rtx_equal_p (operands[0], operands[1]))
- return FALSE;
- return TRUE;
+ return false;
+ return true;
}
/* Return TRUE if the operands to a vec_interleave_{high,low}v2df
@@ -15545,7 +15545,7 @@ ix86_split_copysign_var (rtx operands[])
has source and destination with matching CC modes, and that the
CC mode is at least as constrained as REQ_MODE. */
-int
+bool
ix86_match_ccmode (rtx insn, enum machine_mode req_mode)
{
rtx set;
@@ -15564,19 +15564,19 @@ ix86_match_ccmode (rtx insn, enum machin
if (req_mode != CCNOmode
&& (req_mode != CCmode
|| XEXP (SET_SRC (set), 1) != const0_rtx))
- return 0;
+ return false;
break;
case CCmode:
if (req_mode == CCGCmode)
- return 0;
+ return false;
/* FALLTHRU */
case CCGCmode:
if (req_mode == CCGOCmode || req_mode == CCNOmode)
- return 0;
+ return false;
/* FALLTHRU */
case CCGOCmode:
if (req_mode == CCZmode)
- return 0;
+ return false;
/* FALLTHRU */
case CCAmode:
case CCCmode:
@@ -15589,7 +15589,7 @@ ix86_match_ccmode (rtx insn, enum machin
gcc_unreachable ();
}
- return (GET_MODE (SET_SRC (set)) == set_mode);
+ return GET_MODE (SET_SRC (set)) == set_mode;
}
/* Generate insn patterns to do an integer compare of OPERANDS. */
@@ -16473,7 +16473,7 @@ ix86_expand_carry_flag_compare (enum rtx
return true;
}
-int
+bool
ix86_expand_int_movcc (rtx operands[])
{
enum rtx_code code = GET_CODE (operands[1]), compare_code;
@@ -16653,7 +16653,7 @@ ix86_expand_int_movcc (rtx operands[])
if (!rtx_equal_p (tmp, out))
emit_move_insn (copy_rtx (out), copy_rtx (tmp));
- return 1; /* DONE */
+ return true;
}
if (diff < 0)
@@ -16731,7 +16731,7 @@ ix86_expand_int_movcc (rtx operands[])
if (out != operands[0])
emit_move_insn (operands[0], out);
- return 1; /* DONE */
+ return true;
}
}
@@ -16790,7 +16790,7 @@ ix86_expand_int_movcc (rtx operands[])
if (!rtx_equal_p (out, operands[0]))
emit_move_insn (operands[0], copy_rtx (out));
- return 1; /* DONE */
+ return true;
}
/*
@@ -16884,7 +16884,7 @@ ix86_expand_int_movcc (rtx operands[])
if (!rtx_equal_p (out, operands[0]))
emit_move_insn (operands[0], copy_rtx (out));
- return 1; /* DONE */
+ return true;
}
}
@@ -16896,7 +16896,7 @@ ix86_expand_int_movcc (rtx operands[])
rtx var, orig_out, out, tmp;
if (BRANCH_COST (optimize_insn_for_speed_p (), false) <= 2)
- return 0; /* FAIL */
+ return false;
/* If one of the two operands is an interesting constant, load a
constant with the above and mask it in with a logical operation. */
@@ -16909,7 +16909,7 @@ ix86_expand_int_movcc (rtx operands[])
else if (INTVAL (operands[2]) == -1 && operands[3] != const0_rtx)
operands[3] = const0_rtx, op = ior_optab;
else
- return 0; /* FAIL */
+ return false;
}
else if (CONST_INT_P (operands[3]))
{
@@ -16919,10 +16919,10 @@ ix86_expand_int_movcc (rtx operands[])
else if (INTVAL (operands[3]) == -1 && operands[3] != const0_rtx)
operands[2] = const0_rtx, op = ior_optab;
else
- return 0; /* FAIL */
+ return false;
}
else
- return 0; /* FAIL */
+ return false;
orig_out = operands[0];
tmp = gen_reg_rtx (mode);
@@ -16930,7 +16930,7 @@ ix86_expand_int_movcc (rtx operands[])
/* Recurse to get the constant loaded. */
if (ix86_expand_int_movcc (operands) == 0)
- return 0; /* FAIL */
+ return false;
/* Mask in the interesting variable. */
out = expand_binop (mode, op, var, tmp, orig_out, 0,
@@ -16938,7 +16938,7 @@ ix86_expand_int_movcc (rtx operands[])
if (!rtx_equal_p (out, orig_out))
emit_move_insn (copy_rtx (orig_out), copy_rtx (out));
- return 1; /* DONE */
+ return true;
}
/*
@@ -16971,8 +16971,7 @@ ix86_expand_int_movcc (rtx operands[])
gen_rtx_IF_THEN_ELSE (mode,
compare_op, operands[2],
operands[3])));
-
- return 1; /* DONE */
+ return true;
}
/* Swap, force into registers, or otherwise massage the two operands
@@ -17170,7 +17169,7 @@ ix86_expand_sse_movcc (rtx dest, rtx cmp
/* Expand a floating-point conditional move. Return true if successful. */
-int
+bool
ix86_expand_fp_movcc (rtx operands[])
{
enum machine_mode mode = GET_MODE (operands[0]);
@@ -17190,20 +17189,20 @@ ix86_expand_fp_movcc (rtx operands[])
if (cmode == VOIDmode)
cmode = GET_MODE (op1);
if (cmode != mode)
- return 0;
+ return false;
code = ix86_prepare_sse_fp_compare_args (operands[0], code, &op0, &op1);
if (code == UNKNOWN)
- return 0;
+ return false;
if (ix86_expand_sse_fp_minmax (operands[0], code, op0, op1,
operands[2], operands[3]))
- return 1;
+ return true;
tmp = ix86_expand_sse_cmp (operands[0], code, op0, op1,
operands[2], operands[3]);
ix86_expand_sse_movcc (operands[0], tmp, operands[2], operands[3]);
- return 1;
+ return true;
}
/* The floating point conditional move instructions don't directly
@@ -17222,7 +17221,7 @@ ix86_expand_fp_movcc (rtx operands[])
gen_rtx_IF_THEN_ELSE (mode, compare_op,
operands[2], operands[3])));
- return 1;
+ return true;
}
/* Expand a floating-point vector conditional move; a vcond operation
@@ -17477,7 +17476,7 @@ ix86_expand_sse4_unpack (rtx operands[2]
/* Expand conditional increment or decrement using adb/sbb instructions.
The default case using setcc followed by the conditional move can be
done by generic code. */
-int
+bool
ix86_expand_int_addcc (rtx operands[])
{
enum rtx_code code = GET_CODE (operands[1]);
@@ -17492,9 +17491,9 @@ ix86_expand_int_addcc (rtx operands[])
if (operands[3] != const1_rtx
&& operands[3] != constm1_rtx)
- return 0;
+ return false;
if (!ix86_expand_carry_flag_compare (code, op0, op1, &compare_op))
- return 0;
+ return false;
code = GET_CODE (compare_op);
flags = XEXP (compare_op, 0);
@@ -17562,7 +17561,7 @@ ix86_expand_int_addcc (rtx operands[])
}
emit_insn (insn (operands[0], operands[2], val, flags, compare_op));
- return 1; /* DONE */
+ return true;
}
@@ -19269,7 +19268,7 @@ smallest_pow2_greater_than (int val)
4) Epilogue: code copying tail of the block that is too small to be
handled by main body (or up to size guarded by prologue guard). */
-int
+bool
ix86_expand_movmem (rtx dst, rtx src, rtx count_exp, rtx align_exp,
rtx expected_align_exp, rtx expected_size_exp)
{
@@ -19305,7 +19304,7 @@ ix86_expand_movmem (rtx dst, rtx src, rt
/* Make sure we don't need to care about overflow later on. */
if (count > ((unsigned HOST_WIDE_INT) 1 << 30))
- return 0;
+ return false;
/* Step 0: Decide on preferred algorithm, desired alignment and
size of chunks to be copied by main loop. */
@@ -19317,7 +19316,7 @@ ix86_expand_movmem (rtx dst, rtx src, rt
align = desired_align;
if (alg == libcall)
- return 0;
+ return false;
gcc_assert (alg != no_stringop);
if (!count)
count_exp = copy_to_mode_reg (GET_MODE (count_exp), count_exp);
@@ -19563,7 +19562,7 @@ ix86_expand_movmem (rtx dst, rtx src, rt
epilogue_size_needed);
if (jump_around_label)
emit_label (jump_around_label);
- return 1;
+ return true;
}
/* Helper function for memcpy. For QImode value 0xXY produce
@@ -19662,7 +19661,7 @@ promote_duplicated_reg_to_size (rtx val,
/* Expand string clear operation (bzero). Use i386 string operations when
profitable. See expand_movmem comment for explanation of individual
steps performed. */
-int
+bool
ix86_expand_setmem (rtx dst, rtx count_exp, rtx val_exp, rtx align_exp,
rtx expected_align_exp, rtx expected_size_exp)
{
@@ -19694,7 +19693,7 @@ ix86_expand_setmem (rtx dst, rtx count_e
/* Make sure we don't need to care about overflow later on. */
if (count > ((unsigned HOST_WIDE_INT) 1 << 30))
- return 0;
+ return false;
/* Step 0: Decide on preferred algorithm, desired alignment and
size of chunks to be copied by main loop. */
@@ -19706,7 +19705,7 @@ ix86_expand_setmem (rtx dst, rtx count_e
align = desired_align;
if (alg == libcall)
- return 0;
+ return false;
gcc_assert (alg != no_stringop);
if (!count)
count_exp = copy_to_mode_reg (counter_mode (count_exp), count_exp);
@@ -19952,7 +19951,7 @@ ix86_expand_setmem (rtx dst, rtx count_e
}
if (jump_around_label)
emit_label (jump_around_label);
- return 1;
+ return true;
}
/* Expand the appropriate insns for doing strlen if not just doing
@@ -20130,7 +20129,7 @@ ix86_expand_strlensi_unroll_1 (rtx out,
/* Expand strlen. */
-int
+bool
ix86_expand_strlen (rtx out, rtx src, rtx eoschar, rtx align)
{
rtx addr, scratch1, scratch2, scratch3, scratch4;
@@ -20142,7 +20141,7 @@ ix86_expand_strlen (rtx out, rtx src, rt
&& !TARGET_INLINE_ALL_STRINGOPS
&& !optimize_insn_for_size_p ()
&& (!CONST_INT_P (align) || INTVAL (align) < 4))
- return 0;
+ return false;
addr = force_reg (Pmode, XEXP (src, 0));
scratch1 = gen_reg_rtx (Pmode);
@@ -20191,7 +20190,7 @@ ix86_expand_strlen (rtx out, rtx src, rt
emit_insn (ix86_gen_one_cmpl2 (scratch2, scratch1));
emit_insn (ix86_gen_add3 (out, scratch2, constm1_rtx));
}
- return 1;
+ return true;
}
/* For given symbol (function) construct code to compute address of it's PLT
@@ -26267,9 +26266,9 @@ ix86_class_likely_spilled_p (reg_class_t
When STRICT is false, we are being called from REGISTER_MOVE_COST, so do not
enforce these sanity checks. */
-static inline int
+static inline bool
inline_secondary_memory_needed (enum reg_class class1, enum reg_class class2,
- enum machine_mode mode, int strict)
+ enum machine_mode mode, int strict)
{
if (MAYBE_FLOAT_CLASS_P (class1) != FLOAT_CLASS_P (class1)
|| MAYBE_FLOAT_CLASS_P (class2) != FLOAT_CLASS_P (class2)
@@ -26310,7 +26309,7 @@ inline_secondary_memory_needed (enum reg
return false;
}
-int
+bool
ix86_secondary_memory_needed (enum reg_class class1, enum reg_class class2,
enum machine_mode mode, int strict)
{