===================================================================
@@ -0,0 +1,283 @@
+;; Machine description for GNU compiler,
+;; for Atmel AVR micro controllers.
+;; Copyright (C) 1998 - 2011
+;; Free Software Foundation, Inc.
+;; Contributed by Georg Lay (avr@gjlay.de)
+;;
+;; This file is part of GCC.
+;;
+;; GCC is free software; you can redistribute it and/or modify
+;; it under the terms of the GNU General Public License as published by
+;; the Free Software Foundation; either version 3, or (at your option)
+;; any later version.
+;;
+;; GCC is distributed in the hope that it will be useful,
+;; but WITHOUT ANY WARRANTY; without even the implied warranty of
+;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+;; GNU General Public License for more details.
+;;
+;; You should have received a copy of the GNU General Public License
+;; along with GCC; see the file COPYING3. If not see
+;; <http://www.gnu.org/licenses/>.
+
+;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+
+;; The purpose of this file is to provide a light-weight DImode
+;; implementation for AVR. The trouble with DImode is that tree -> RTL
+;; lowering leads to really unpleasant code for operations that don't
+;; work byte-wise like NEG, PLUS, MINUS, etc. Defining optabs entries for
+;; them won't help because the optab machinery assumes these operations
+;; are cheap and does not check if a libgcc implementation is available.
+;;
+;; The DImode insns are all straightforward -- except movdi. The approach
+;; of this implementation is to provide DImode insns without the burden of
+;; introducing movdi.
+;;
+;; The caveat is that if there are insns for some mode, there must also be a
+;; respective move insn that describes reloads. Therefore, this
+;; implementation uses an accumulator-based model with two hard-coded,
+;; accumulator-like registers
+;;
+;; A[] = reg:DI 18
+;; B[] = reg:DI 10
+;;
+;; so that no DImode insn contains pseudos or needs reloading.
+
+(define_constants
+ [(ACC_A 18)
+ (ACC_B 10)])
+
+;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+;; Addition
+;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+
+(define_expand "adddi3"
+ [(parallel [(match_operand:DI 0 "general_operand" "")
+ (match_operand:DI 1 "general_operand" "")
+ (match_operand:DI 2 "general_operand" "")])]
+ "avr_have_dimode"
+ {
+ rtx acc_a = gen_rtx_REG (DImode, ACC_A);
+
+ emit_move_insn (acc_a, operands[1]);
+
+ if (s8_operand (operands[2], VOIDmode))
+ {
+ emit_move_insn (gen_rtx_REG (QImode, REG_X), operands[2]);
+ emit_insn (gen_adddi3_const8_insn ());
+ }
+ else if (CONST_INT_P (operands[2])
+ || CONST_DOUBLE_P (operands[2]))
+ {
+ emit_insn (gen_adddi3_const_insn (operands[2]));
+ }
+ else
+ {
+ emit_move_insn (gen_rtx_REG (DImode, ACC_B), operands[2]);
+ emit_insn (gen_adddi3_insn ());
+ }
+
+ emit_move_insn (operands[0], acc_a);
+ DONE;
+ })
+
+(define_insn "adddi3_insn"
+ [(set (reg:DI ACC_A)
+ (plus:DI (reg:DI ACC_A)
+ (reg:DI ACC_B)))]
+ "avr_have_dimode"
+ "%~call __adddi3"
+ [(set_attr "adjust_len" "call")
+ (set_attr "cc" "clobber")])
+
+(define_insn "adddi3_const8_insn"
+ [(set (reg:DI ACC_A)
+ (plus:DI (reg:DI ACC_A)
+ (sign_extend:DI (reg:QI REG_X))))]
+ "avr_have_dimode"
+ "%~call __adddi3_s8"
+ [(set_attr "adjust_len" "call")
+ (set_attr "cc" "clobber")])
+
+(define_insn "adddi3_const_insn"
+ [(set (reg:DI ACC_A)
+ (plus:DI (reg:DI ACC_A)
+ (match_operand:DI 0 "const_double_operand" "n")))]
+ "avr_have_dimode
+ && !s8_operand (operands[0], VOIDmode)"
+ {
+ return avr_out_plus64 (operands[0], NULL);
+ }
+ [(set_attr "adjust_len" "plus64")
+ (set_attr "cc" "clobber")])
+
+
+;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+;; Subtraction
+;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+
+(define_expand "subdi3"
+ [(parallel [(match_operand:DI 0 "general_operand" "")
+ (match_operand:DI 1 "general_operand" "")
+ (match_operand:DI 2 "general_operand" "")])]
+ "avr_have_dimode"
+ {
+ rtx acc_a = gen_rtx_REG (DImode, ACC_A);
+
+ emit_move_insn (acc_a, operands[1]);
+ emit_move_insn (gen_rtx_REG (DImode, ACC_B), operands[2]);
+ emit_insn (gen_subdi3_insn ());
+ emit_move_insn (operands[0], acc_a);
+ DONE;
+ })
+
+(define_insn "subdi3_insn"
+ [(set (reg:DI ACC_A)
+ (minus:DI (reg:DI ACC_A)
+ (reg:DI ACC_B)))]
+ "avr_have_dimode"
+ "%~call __subdi3"
+ [(set_attr "adjust_len" "call")
+ (set_attr "cc" "set_czn")])
+
+
+;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+;; Negation
+;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+
+(define_expand "negdi2"
+ [(parallel [(match_operand:DI 0 "general_operand" "")
+ (match_operand:DI 1 "general_operand" "")])]
+ "avr_have_dimode"
+ {
+ rtx acc_a = gen_rtx_REG (DImode, ACC_A);
+
+ emit_move_insn (acc_a, operands[1]);
+ emit_insn (gen_negdi2_insn ());
+ emit_move_insn (operands[0], acc_a);
+ DONE;
+ })
+
+(define_insn "negdi2_insn"
+ [(set (reg:DI ACC_A)
+ (neg:DI (reg:DI ACC_A)))]
+ "avr_have_dimode"
+ "%~call __negdi2"
+ [(set_attr "adjust_len" "call")
+ (set_attr "cc" "clobber")])
+
+
+;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+;; Comparison
+;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+
+(define_expand "conditional_jump"
+ [(set (pc)
+ (if_then_else
+ (match_operator 0 "ordered_comparison_operator" [(cc0)
+ (const_int 0)])
+ (label_ref (match_operand 1 "" ""))
+ (pc)))]
+ "avr_have_dimode")
+
+(define_expand "cbranchdi4"
+ [(parallel [(match_operand:DI 1 "register_operand" "")
+ (match_operand:DI 2 "nonmemory_operand" "")
+ (match_operator 0 "ordered_comparison_operator" [(cc0)
+ (const_int 0)])
+ (label_ref (match_operand 3 "" ""))])]
+ "avr_have_dimode"
+ {
+ rtx acc_a = gen_rtx_REG (DImode, ACC_A);
+
+ emit_move_insn (acc_a, operands[1]);
+
+ if (s8_operand (operands[2], VOIDmode))
+ {
+ emit_move_insn (gen_rtx_REG (QImode, REG_X), operands[2]);
+ emit_insn (gen_compare_const8_di2 ());
+ }
+ else if (CONST_INT_P (operands[2])
+ || CONST_DOUBLE_P (operands[2]))
+ {
+ emit_insn (gen_compare_const_di2 (operands[2]));
+ }
+ else
+ {
+ emit_move_insn (gen_rtx_REG (DImode, ACC_B), operands[2]);
+ emit_insn (gen_compare_di2 ());
+ }
+
+ emit_jump_insn (gen_conditional_jump (operands[0], operands[3]));
+ DONE;
+ })
+
+(define_insn "compare_di2"
+ [(set (cc0)
+ (compare (reg:DI ACC_A)
+ (reg:DI ACC_B)))]
+ "avr_have_dimode"
+ "%~call __cmpdi2"
+ [(set_attr "adjust_len" "call")
+ (set_attr "cc" "compare")])
+
+(define_insn "compare_const8_di2"
+ [(set (cc0)
+ (compare (reg:DI ACC_A)
+ (sign_extend:DI (reg:QI REG_X))))]
+ "avr_have_dimode"
+ "%~call __cmpdi2_s8"
+ [(set_attr "adjust_len" "call")
+ (set_attr "cc" "compare")])
+
+(define_insn "compare_const_di2"
+ [(set (cc0)
+ (compare (reg:DI ACC_A)
+ (match_operand:DI 0 "const_double_operand" "n")))
+ (clobber (match_scratch:QI 1 "=&d"))]
+ "avr_have_dimode
+ && !s8_operand (operands[0], VOIDmode)"
+ {
+ return avr_out_compare64 (insn, operands, NULL);
+ }
+ [(set_attr "adjust_len" "compare64")
+ (set_attr "cc" "compare")])
+
+
+;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+;; Shifts and Rotate
+;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+
+(define_code_iterator di_shifts
+ [ashift ashiftrt lshiftrt rotate])
+
+;; Shift functions from libgcc are called without defining these insns,
+;; but with them we can describe their reduced register footprint.
+
+;; "ashldi3"
+;; "ashrdi3"
+;; "lshrdi3"
+;; "rotldi3"
+(define_expand "<code_stdname>di3"
+ [(parallel [(match_operand:DI 0 "general_operand" "")
+ (di_shifts:DI (match_operand:DI 1 "general_operand" "")
+ (match_operand:QI 2 "general_operand" ""))])]
+ "avr_have_dimode"
+ {
+ rtx acc_a = gen_rtx_REG (DImode, ACC_A);
+
+ emit_move_insn (acc_a, operands[1]);
+ emit_move_insn (gen_rtx_REG (QImode, 16), operands[2]);
+ emit_insn (gen_<code_stdname>di3_insn ());
+ emit_move_insn (operands[0], acc_a);
+ DONE;
+ })
+
+(define_insn "<code_stdname>di3_insn"
+ [(set (reg:DI ACC_A)
+ (di_shifts:DI (reg:DI ACC_A)
+ (reg:QI 16)))]
+ "avr_have_dimode"
+ "%~call __<code_stdname>di3"
+ [(set_attr "adjust_len" "call")
+ (set_attr "cc" "clobber")])
===================================================================
@@ -131,8 +131,8 @@ (define_attr "length" ""
;; Otherwise do special processing depending on the attribute.
(define_attr "adjust_len"
- "out_bitop, out_plus, out_plus_noclobber, addto_sp,
- tsthi, tstpsi, tstsi, compare, call,
+ "out_bitop, out_plus, out_plus_noclobber, plus64, addto_sp,
+ tsthi, tstpsi, tstsi, compare, compare64, call,
mov8, mov16, mov24, mov32, reload_in16, reload_in24, reload_in32,
xload, movmem,
ashlqi, ashrqi, lshrqi,
@@ -206,7 +206,6 @@ (define_mode_iterator QIHI [(QI "") (HI
(define_mode_iterator QIHI2 [(QI "") (HI "")])
(define_mode_iterator QISI [(QI "") (HI "") (PSI "") (SI "")])
(define_mode_iterator QIDI [(QI "") (HI "") (PSI "") (SI "") (DI "")])
-(define_mode_iterator HIDI [(HI "") (PSI "") (SI "") (DI "")])
(define_mode_iterator HISI [(HI "") (PSI "") (SI "")])
;; All supported move-modes
@@ -235,6 +234,12 @@ (define_code_attr mul_r_d
[(zero_extend "r")
(sign_extend "d")])
+;; Map RTX code to its standard insn name
+(define_code_attr code_stdname
+ [(ashift "ashl")
+ (ashiftrt "ashr")
+ (lshiftrt "lshr")
+ (rotate "rotl")])
;;========================================================================
;; The following is used by nonlocal_goto and setjmp.
@@ -2956,23 +2961,21 @@ (define_insn "*rotlqi3"
[(set_attr "length" "2,4,4,1,3,5,3,0")
(set_attr "cc" "set_n,set_n,clobber,none,set_n,set_n,clobber,none")])
-;; Split all rotates of HI,SI and DImode registers where rotation is by
+;; Split all rotates of HI,SI and PSImode registers where rotation is by
;; a whole number of bytes. The split creates the appropriate moves and
-;; considers all overlap situations. DImode is split before reload.
+;; considers all overlap situations.
;; HImode does not need scratch. Use attribute for this constraint.
-;; Use QI scratch for DI mode as this is often split into byte sized operands.
-(define_mode_attr rotx [(DI "&r,&r,X") (SI "&r,&r,X") (PSI "&r,&r,X") (HI "X,X,X")])
-(define_mode_attr rotsmode [(DI "QI") (SI "HI") (PSI "QI") (HI "QI")])
+(define_mode_attr rotx [(SI "&r,&r,X") (PSI "&r,&r,X") (HI "X,X,X")])
+(define_mode_attr rotsmode [(SI "HI") (PSI "QI") (HI "QI")])
;; "rotlhi3"
;; "rotlpsi3"
;; "rotlsi3"
-;; "rotldi3"
(define_expand "rotl<mode>3"
- [(parallel [(set (match_operand:HIDI 0 "register_operand" "")
- (rotate:HIDI (match_operand:HIDI 1 "register_operand" "")
+ [(parallel [(set (match_operand:HISI 0 "register_operand" "")
+ (rotate:HISI (match_operand:HISI 1 "register_operand" "")
(match_operand:VOID 2 "const_int_operand" "")))
(clobber (match_dup 3))])]
""
@@ -2991,9 +2994,8 @@ (define_expand "rotl<mode>3"
else
operands[3] = gen_rtx_SCRATCH (QImode);
}
- else if (<MODE>mode != DImode
- && (offset == 1
- || offset == GET_MODE_BITSIZE (<MODE>mode) -1))
+ else if (offset == 1
+ || offset == GET_MODE_BITSIZE (<MODE>mode) -1)
{
/*; Support rotate left/right by 1 */
@@ -3069,18 +3071,17 @@ (define_insn "*rotlsi2.31"
;; "*rotwhi"
;; "*rotwsi"
-;; "*rotwdi"
(define_insn_and_split "*rotw<mode>"
- [(set (match_operand:HIDI 0 "register_operand" "=r,r,#&r")
- (rotate:HIDI (match_operand:HIDI 1 "register_operand" "0,r,r")
- (match_operand 2 "const_int_operand" "n,n,n")))
+ [(set (match_operand:HISI 0 "register_operand" "=r,r,#&r")
+ (rotate:HISI (match_operand:HISI 1 "register_operand" "0,r,r")
+ (match_operand 2 "const_int_operand" "n,n,n")))
(clobber (match_scratch:<rotsmode> 3 "=<rotx>"))]
"AVR_HAVE_MOVW
&& CONST_INT_P (operands[2])
&& GET_MODE_SIZE (<MODE>mode) % 2 == 0
&& 0 == INTVAL (operands[2]) % 16"
"#"
- "&& (reload_completed || <MODE>mode == DImode)"
+ "&& reload_completed"
[(const_int 0)]
{
avr_rotate_bytes (operands);
@@ -3093,11 +3094,10 @@ (define_insn_and_split "*rotw<mode>"
;; "*rotbhi"
;; "*rotbpsi"
;; "*rotbsi"
-;; "*rotbdi"
(define_insn_and_split "*rotb<mode>"
- [(set (match_operand:HIDI 0 "register_operand" "=r,r,#&r")
- (rotate:HIDI (match_operand:HIDI 1 "register_operand" "0,r,r")
- (match_operand 2 "const_int_operand" "n,n,n")))
+ [(set (match_operand:HISI 0 "register_operand" "=r,r,#&r")
+ (rotate:HISI (match_operand:HISI 1 "register_operand" "0,r,r")
+ (match_operand 2 "const_int_operand" "n,n,n")))
(clobber (match_scratch:QI 3 "=<rotx>"))]
"CONST_INT_P (operands[2])
&& (8 == INTVAL (operands[2]) % 16
@@ -3105,7 +3105,7 @@ (define_insn_and_split "*rotb<mode>"
|| GET_MODE_SIZE (<MODE>mode) % 2 != 0)
&& 0 == INTVAL (operands[2]) % 16))"
"#"
- "&& (reload_completed || <MODE>mode == DImode)"
+ "&& reload_completed"
[(const_int 0)]
{
avr_rotate_bytes (operands);
@@ -5779,3 +5779,5 @@ (define_insn_and_split "*extzv.qihi2"
operands[3] = simplify_gen_subreg (QImode, operands[0], HImode, 0);
operands[4] = simplify_gen_subreg (QImode, operands[0], HImode, 1);
})
+
+(include "avr-dimode.md")
===================================================================
@@ -77,3 +77,7 @@ When accessing RAM, use X as imposed by
mbranch-cost=
Target Report RejectNegative Joined UInteger Var(avr_branch_cost) Init(0)
Set the cost of a branch instruction. Default value is 0.
+
+m64
+Target Report Var(avr_have_dimode) Init(0)
+Experimental.
===================================================================
@@ -56,6 +56,7 @@ extern const char *avr_out_tstsi (rtx, r
extern const char *avr_out_tsthi (rtx, rtx*, int*);
extern const char *avr_out_tstpsi (rtx, rtx*, int*);
extern const char *avr_out_compare (rtx, rtx*, int*);
+extern const char *avr_out_compare64 (rtx, rtx*, int*);
extern const char *ret_cond_branch (rtx x, int len, int reverse);
extern const char *avr_out_movpsi (rtx, rtx*, int*);
@@ -89,6 +90,7 @@ extern const char *avr_out_sbxx_branch (
extern const char* avr_out_bitop (rtx, rtx*, int*);
extern const char* avr_out_plus (rtx*, int*, int*);
extern const char* avr_out_plus_noclobber (rtx*, int*, int*);
+extern const char* avr_out_plus64 (rtx, int*);
extern const char* avr_out_addto_sp (rtx*, int*);
extern const char* avr_out_xload (rtx, rtx*, int*);
extern const char* avr_out_movmem (rtx, rtx*, int*);
===================================================================
@@ -4031,14 +4031,17 @@ avr_out_compare (rtx insn, rtx *xop, int
/* Value (0..0xff) held in clobber register xop[2] or -1 if unknown. */
int clobber_val = -1;
- gcc_assert (REG_P (xreg)
- && CONST_INT_P (xval));
+ gcc_assert (REG_P (xreg));
+ gcc_assert ((CONST_INT_P (xval) && n_bytes <= 4)
+ || (const_double_operand (xval, VOIDmode) && n_bytes == 8));
if (plen)
*plen = 0;
/* Comparisons == +/-1 and != +/-1 can be done similar to camparing
- against 0 by ORing the bytes. This is one instruction shorter. */
+ against 0 by ORing the bytes. This is one instruction shorter.
+ Notice that DImode comparisons are always against reg:DI 18
+ and therefore don't use this. */
if (!test_hard_reg_class (LD_REGS, xreg)
&& compare_eq_p (insn)
@@ -4156,6 +4159,20 @@ avr_out_compare (rtx insn, rtx *xop, int
}
+/* Prepare operands of compare_const_di2 to be used with avr_out_compare. */
+
+const char*
+avr_out_compare64 (rtx insn, rtx *op, int *plen)
+{
+ rtx xop[3];
+
+ xop[0] = gen_rtx_REG (DImode, 18);
+ xop[1] = op[0];
+ xop[2] = op[1];
+
+ return avr_out_compare (insn, xop, plen);
+}
+
/* Output test instruction for HImode. */
const char*
@@ -5795,7 +5812,7 @@ avr_out_plus_1 (rtx *xop, int *plen, enu
*pcc = (MINUS == code) ? CC_SET_CZN : CC_CLOBBER;
if (MINUS == code)
- xval = gen_int_mode (-UINTVAL (xval), mode);
+ xval = simplify_unary_operation (NEG, mode, xval, mode);
op[2] = xop[3];
@@ -5970,6 +5987,25 @@ avr_out_plus_noclobber (rtx *xop, int *p
return avr_out_plus (op, plen, pcc);
}
+
+/* Prepare operands of adddi3_const_insn to be used with avr_out_plus_1. */
+
+const char*
+avr_out_plus64 (rtx addend, int *plen)
+{
+ int cc_dummy;
+ rtx op[4];
+
+ op[0] = gen_rtx_REG (DImode, 18);
+ op[1] = op[0];
+ op[2] = addend;
+ op[3] = NULL_RTX;
+
+ avr_out_plus_1 (op, plen, MINUS, &cc_dummy);
+
+ return "";
+}
+
/* Output bit operation (IOR, AND, XOR) with register XOP[0] and compile
time constant XOP[2]:
@@ -6355,6 +6391,7 @@ adjust_insn_length (rtx insn, int len)
case ADJUST_LEN_OUT_BITOP: avr_out_bitop (insn, op, &len); break;
case ADJUST_LEN_OUT_PLUS: avr_out_plus (op, &len, NULL); break;
+ case ADJUST_LEN_PLUS64: avr_out_plus64 (op[0], &len); break;
case ADJUST_LEN_OUT_PLUS_NOCLOBBER:
avr_out_plus_noclobber (op, &len, NULL); break;
@@ -6371,6 +6408,7 @@ adjust_insn_length (rtx insn, int len)
case ADJUST_LEN_TSTPSI: avr_out_tstpsi (insn, op, &len); break;
case ADJUST_LEN_TSTSI: avr_out_tstsi (insn, op, &len); break;
case ADJUST_LEN_COMPARE: avr_out_compare (insn, op, &len); break;
+ case ADJUST_LEN_COMPARE64: avr_out_compare64 (insn, op, &len); break;
case ADJUST_LEN_LSHRQI: lshrqi3_out (insn, op, &len); break;
case ADJUST_LEN_LSHRHI: lshrhi3_out (insn, op, &len); break;
@@ -8327,7 +8365,9 @@ avr_compare_pattern (rtx insn)
if (pattern
&& NONJUMP_INSN_P (insn)
&& SET_DEST (pattern) == cc0_rtx
- && GET_CODE (SET_SRC (pattern)) == COMPARE)
+ && GET_CODE (SET_SRC (pattern)) == COMPARE
+ && DImode != GET_MODE (XEXP (SET_SRC (pattern), 0))
+ && DImode != GET_MODE (XEXP (SET_SRC (pattern), 1)))
{
return pattern;
}
===================================================================
@@ -1161,6 +1161,71 @@ ENDF __divdi3_moddi3
#endif /* L_divdi3 */
+.section .text.libgcc, "ax", @progbits
+
+#define TT __tmp_reg__
+
+#if defined (L_adddi3)
+;; (set (reg:DI 18)
+;; (plus:DI (reg:DI 18)
+;; (reg:DI 10)))
+DEFUN __adddi3
+ ADD A0,B0 $ adc A1,B1 $ adc A2,B2 $ adc A3,B3
+ adc A4,B4 $ adc A5,B5 $ adc A6,B6 $ adc A7,B7
+ ret
+ENDF __adddi3
+#endif /* L_adddi3 */
+
+#if defined (L_adddi3_s8)
+;; (set (reg:DI 18)
+;; (plus:DI (reg:DI 18)
+;; (sign_extend:DI (reg:QI 26))))
+DEFUN __adddi3_s8
+ clr TT
+ sbrc r26, 7
+ com TT
+ ADD A0,r26 $ adc A1,TT $ adc A2,TT $ adc A3,TT
+ adc A4,TT $ adc A5,TT $ adc A6,TT $ adc A7,TT
+ ret
+ENDF __adddi3_s8
+#endif /* L_adddi3_s8 */
+
+#if defined (L_subdi3)
+;; (set (reg:DI 18)
+;; (minus:DI (reg:DI 18)
+;; (reg:DI 10)))
+DEFUN __subdi3
+ SUB A0,B0 $ sbc A1,B1 $ sbc A2,B2 $ sbc A3,B3
+ sbc A4,B4 $ sbc A5,B5 $ sbc A6,B6 $ sbc A7,B7
+ ret
+ENDF __subdi3
+#endif /* L_subdi3 */
+
+#if defined (L_cmpdi2)
+;; (set (cc0)
+;; (compare (reg:DI 18)
+;; (reg:DI 10)))
+DEFUN __cmpdi2
+ CP A0,B0 $ cpc A1,B1 $ cpc A2,B2 $ cpc A3,B3
+ cpc A4,B4 $ cpc A5,B5 $ cpc A6,B6 $ cpc A7,B7
+ ret
+ENDF __cmpdi2
+#endif /* L_cmpdi2 */
+
+#if defined (L_cmpdi2_s8)
+;; (set (cc0)
+;; (compare (reg:DI 18)
+;; (sign_extend:DI (reg:QI 26))))
+DEFUN __cmpdi2_s8
+ clr TT
+ sbrc r26, 7
+ com TT
+ CP A0,r26 $ cpc A1,TT $ cpc A2,TT $ cpc A3,TT
+ cpc A4,TT $ cpc A5,TT $ cpc A6,TT $ cpc A7,TT
+ ret
+ENDF __cmpdi2_s8
+#endif /* L_cmpdi2_s8 */
+
#if defined (L_negdi2)
DEFUN __negdi2
@@ -1174,6 +1239,8 @@ DEFUN __negdi2
ENDF __negdi2
#endif /* L_negdi2 */
+#undef TT
+
#undef C7
#undef C6
#undef C5
@@ -2052,6 +2119,29 @@ DEFUN __ashldi3
ENDF __ashldi3
#endif /* defined (L_ashldi3) */
+#if defined (L_rotldi3)
+;; Rotate left
+;; r25:r18 = rotl64 (r25:r18, r17:r16)
+DEFUN __rotldi3
+ push r16
+ andi r16, 63
+ breq 2f
+1: lsl r18
+ rol r19
+ rol r20
+ rol r21
+ rol r22
+ rol r23
+ rol r24
+ rol r25
+ adc r18, __zero_reg__
+ dec r16
+ brne 1b
+2: pop r16
+ ret
+ENDF __rotldi3
+#endif /* defined (L_rotldi3) */
+
.section .text.libgcc.fmul, "ax", @progbits
===================================================================
@@ -47,9 +47,9 @@ LIB1ASMFUNCS = \
_popcountqi2 \
_bswapsi2 \
_bswapdi2 \
- _ashldi3 \
- _ashrdi3 \
- _lshrdi3 \
+ _ashldi3 _ashrdi3 _lshrdi3 _rotldi3 \
+ _adddi3 _adddi3_s8 _subdi3 \
+ _cmpdi2 _cmpdi2_s8 \
_fmul _fmuls _fmulsu
LIB2FUNCS_EXCLUDE = \