@@ -643,7 +643,7 @@
convert_move (small_swap, swap, 0);
low_product = gen_reg_rtx (V4SImode);
- emit_insn (gen_altivec_vmulouh (low_product, one, two));
+ emit_insn (gen_vec_widen_umult_odd_v8hi (low_product, one, two));
high_product = gen_reg_rtx (V4SImode);
emit_insn (gen_altivec_vmsumuhm (high_product, one, small_swap, zero));
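
For reference, the hunk above sits in the mulv4si3 expander, which synthesizes a 32x32->32 multiply from 16-bit pieces: per word, a*b == lo(a)*lo(b) + ((hi(a)*lo(b) + lo(a)*hi(b)) << 16) modulo 2^32, where vmulouh (now gen_vec_widen_umult_odd_v8hi) supplies the lo*lo term and vmsumuhm over the half-swapped operand supplies the cross terms. A minimal scalar sketch of that identity (helper name is illustrative, not from GCC):

    #include <stdint.h>

    /* Illustrative helper, not from GCC: a 32x32->32 multiply rebuilt
       from 16-bit pieces, mirroring the mulv4si3 expansion above.  */
    static uint32_t
    mul32_from_halves (uint32_t a, uint32_t b)
    {
      uint32_t lo_lo = (a & 0xffff) * (b & 0xffff);    /* vmulouh term  */
      uint32_t cross = (a >> 16) * (b & 0xffff)
                       + (a & 0xffff) * (b >> 16);     /* vmsumuhm term */
      return lo_lo + (cross << 16);                    /* vslw + vadduwm */
    }
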
@@ -667,8 +667,8 @@
rtx high = gen_reg_rtx (V4SImode);
rtx low = gen_reg_rtx (V4SImode);
- emit_insn (gen_altivec_vmulesh (even, operands[1], operands[2]));
- emit_insn (gen_altivec_vmulosh (odd, operands[1], operands[2]));
+ emit_insn (gen_vec_widen_smult_even_v8hi (even, operands[1], operands[2]));
+ emit_insn (gen_vec_widen_smult_odd_v8hi (odd, operands[1], operands[2]));
emit_insn (gen_altivec_vmrghw (high, even, odd));
emit_insn (gen_altivec_vmrglw (low, even, odd));
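
This hunk follows the usual AltiVec recipe for a full-width V8HI multiply: widen with the even/odd multiplies, interleave the two product vectors back into element order, then pack the low halfwords (the vpkuwum step falls outside the hunk). A rough intrinsics sketch of the same recipe, assuming big-endian element numbering and an illustrative function name:

    #include <altivec.h>

    /* Illustrative only: the even/odd-multiply, merge, pack recipe
       written with intrinsics (big-endian element numbering assumed).  */
    vector signed short
    mulv8hi_sketch (vector signed short a, vector signed short b)
    {
      vector signed int even = vec_mule (a, b);         /* vmulesh              */
      vector signed int odd  = vec_mulo (a, b);         /* vmulosh              */
      vector signed int high = vec_mergeh (even, odd);  /* vmrghw: products 0-3 */
      vector signed int low  = vec_mergel (even, odd);  /* vmrglw: products 4-7 */
      return vec_pack (high, low);                      /* vpkuwum: low 16 bits */
    }
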
@@ -936,7 +936,7 @@
"vmrglw %0,%1,%2"
[(set_attr "type" "vecperm")])
-(define_insn "altivec_vmuleub"
+(define_insn "vec_widen_umult_even_v16qi"
[(set (match_operand:V8HI 0 "register_operand" "=v")
(unspec:V8HI [(match_operand:V16QI 1 "register_operand" "v")
(match_operand:V16QI 2 "register_operand" "v")]
@@ -945,7 +945,7 @@
"vmuleub %0,%1,%2"
[(set_attr "type" "veccomplex")])
-(define_insn "altivec_vmulesb"
+(define_insn "vec_widen_smult_even_v16qi"
[(set (match_operand:V8HI 0 "register_operand" "=v")
(unspec:V8HI [(match_operand:V16QI 1 "register_operand" "v")
(match_operand:V16QI 2 "register_operand" "v")]
@@ -954,7 +954,7 @@
"vmulesb %0,%1,%2"
[(set_attr "type" "veccomplex")])
-(define_insn "altivec_vmuleuh"
+(define_insn "vec_widen_umult_even_v8hi"
[(set (match_operand:V4SI 0 "register_operand" "=v")
(unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")
(match_operand:V8HI 2 "register_operand" "v")]
@@ -963,7 +963,7 @@
"vmuleuh %0,%1,%2"
[(set_attr "type" "veccomplex")])
-(define_insn "altivec_vmulesh"
+(define_insn "vec_widen_smult_even_v8hi"
[(set (match_operand:V4SI 0 "register_operand" "=v")
(unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")
(match_operand:V8HI 2 "register_operand" "v")]
@@ -972,7 +972,7 @@
"vmulesh %0,%1,%2"
[(set_attr "type" "veccomplex")])
-(define_insn "altivec_vmuloub"
+(define_insn "vec_widen_umult_odd_v16qi"
[(set (match_operand:V8HI 0 "register_operand" "=v")
(unspec:V8HI [(match_operand:V16QI 1 "register_operand" "v")
(match_operand:V16QI 2 "register_operand" "v")]
@@ -981,7 +981,7 @@
"vmuloub %0,%1,%2"
[(set_attr "type" "veccomplex")])
-(define_insn "altivec_vmulosb"
+(define_insn "vec_widen_smult_odd_v16qi"
[(set (match_operand:V8HI 0 "register_operand" "=v")
(unspec:V8HI [(match_operand:V16QI 1 "register_operand" "v")
(match_operand:V16QI 2 "register_operand" "v")]
@@ -990,7 +990,7 @@
"vmulosb %0,%1,%2"
[(set_attr "type" "veccomplex")])
-(define_insn "altivec_vmulouh"
+(define_insn "vec_widen_umult_odd_v8hi"
[(set (match_operand:V4SI 0 "register_operand" "=v")
(unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")
(match_operand:V8HI 2 "register_operand" "v")]
@@ -999,7 +999,7 @@
"vmulouh %0,%1,%2"
[(set_attr "type" "veccomplex")])
-(define_insn "altivec_vmulosh"
+(define_insn "vec_widen_smult_odd_v8hi"
[(set (match_operand:V4SI 0 "register_operand" "=v")
(unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")
(match_operand:V8HI 2 "register_operand" "v")]
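
The renames above are the point of the patch: vec_widen_{u,s}mult_{even,odd}_<mode> are standard named patterns the middle end can use when vectorizing widening multiplies, so the vmule*/vmulo* instructions become directly reachable from generic code. Roughly the loop shape these named patterns target (illustrative source, not from the patch):

    /* Each 16-bit product is widened to 32 bits; this is the kind of loop
       the vec_widen_*mult_* named patterns let the vectorizer handle.  */
    void
    widen_mult (int *restrict out, const short *restrict a,
                const short *restrict b, int n)
    {
      int i;
      for (i = 0; i < n; i++)
        out[i] = (int) a[i] * b[i];
    }
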
@@ -2175,8 +2175,8 @@
rtx ve = gen_reg_rtx (V8HImode);
rtx vo = gen_reg_rtx (V8HImode);
- emit_insn (gen_altivec_vmuleub (ve, operands[1], operands[2]));
- emit_insn (gen_altivec_vmuloub (vo, operands[1], operands[2]));
+ emit_insn (gen_vec_widen_umult_even_v16qi (ve, operands[1], operands[2]));
+ emit_insn (gen_vec_widen_umult_odd_v16qi (vo, operands[1], operands[2]));
emit_insn (gen_altivec_vmrghh (operands[0], ve, vo));
DONE;
}")
@@ -2192,8 +2192,8 @@
rtx ve = gen_reg_rtx (V8HImode);
rtx vo = gen_reg_rtx (V8HImode);
- emit_insn (gen_altivec_vmuleub (ve, operands[1], operands[2]));
- emit_insn (gen_altivec_vmuloub (vo, operands[1], operands[2]));
+ emit_insn (gen_vec_widen_umult_even_v16qi (ve, operands[1], operands[2]));
+ emit_insn (gen_vec_widen_umult_odd_v16qi (vo, operands[1], operands[2]));
emit_insn (gen_altivec_vmrglh (operands[0], ve, vo));
DONE;
}")
@@ -2209,8 +2209,8 @@
rtx ve = gen_reg_rtx (V8HImode);
rtx vo = gen_reg_rtx (V8HImode);
- emit_insn (gen_altivec_vmulesb (ve, operands[1], operands[2]));
- emit_insn (gen_altivec_vmulosb (vo, operands[1], operands[2]));
+ emit_insn (gen_vec_widen_smult_even_v16qi (ve, operands[1], operands[2]));
+ emit_insn (gen_vec_widen_smult_odd_v16qi (vo, operands[1], operands[2]));
emit_insn (gen_altivec_vmrghh (operands[0], ve, vo));
DONE;
}")
@@ -2226,8 +2226,8 @@
rtx ve = gen_reg_rtx (V8HImode);
rtx vo = gen_reg_rtx (V8HImode);
- emit_insn (gen_altivec_vmulesb (ve, operands[1], operands[2]));
- emit_insn (gen_altivec_vmulosb (vo, operands[1], operands[2]));
+ emit_insn (gen_vec_widen_smult_even_v16qi (ve, operands[1], operands[2]));
+ emit_insn (gen_vec_widen_smult_odd_v16qi (vo, operands[1], operands[2]));
emit_insn (gen_altivec_vmrglh (operands[0], ve, vo));
DONE;
}")
@@ -2243,8 +2243,8 @@
rtx ve = gen_reg_rtx (V4SImode);
rtx vo = gen_reg_rtx (V4SImode);
- emit_insn (gen_altivec_vmuleuh (ve, operands[1], operands[2]));
- emit_insn (gen_altivec_vmulouh (vo, operands[1], operands[2]));
+ emit_insn (gen_vec_widen_umult_even_v8hi (ve, operands[1], operands[2]));
+ emit_insn (gen_vec_widen_umult_odd_v8hi (vo, operands[1], operands[2]));
emit_insn (gen_altivec_vmrghw (operands[0], ve, vo));
DONE;
}")
@@ -2260,8 +2260,8 @@
rtx ve = gen_reg_rtx (V4SImode);
rtx vo = gen_reg_rtx (V4SImode);
- emit_insn (gen_altivec_vmuleuh (ve, operands[1], operands[2]));
- emit_insn (gen_altivec_vmulouh (vo, operands[1], operands[2]));
+ emit_insn (gen_vec_widen_umult_even_v8hi (ve, operands[1], operands[2]));
+ emit_insn (gen_vec_widen_umult_odd_v8hi (vo, operands[1], operands[2]));
emit_insn (gen_altivec_vmrglw (operands[0], ve, vo));
DONE;
}")
@@ -2277,8 +2277,8 @@
rtx ve = gen_reg_rtx (V4SImode);
rtx vo = gen_reg_rtx (V4SImode);
- emit_insn (gen_altivec_vmulesh (ve, operands[1], operands[2]));
- emit_insn (gen_altivec_vmulosh (vo, operands[1], operands[2]));
+ emit_insn (gen_vec_widen_smult_even_v8hi (ve, operands[1], operands[2]));
+ emit_insn (gen_vec_widen_smult_odd_v8hi (vo, operands[1], operands[2]));
emit_insn (gen_altivec_vmrghw (operands[0], ve, vo));
DONE;
}")
@@ -2294,8 +2294,8 @@
rtx ve = gen_reg_rtx (V4SImode);
rtx vo = gen_reg_rtx (V4SImode);
- emit_insn (gen_altivec_vmulesh (ve, operands[1], operands[2]));
- emit_insn (gen_altivec_vmulosh (vo, operands[1], operands[2]));
+ emit_insn (gen_vec_widen_smult_even_v8hi (ve, operands[1], operands[2]));
+ emit_insn (gen_vec_widen_smult_odd_v8hi (vo, operands[1], operands[2]));
emit_insn (gen_altivec_vmrglw (operands[0], ve, vo));
DONE;
}")
@@ -503,18 +503,18 @@ BU_ALTIVEC_2 (VMINSH, "vminsh", CONST, sminv8hi3)
BU_ALTIVEC_2 (VMINUW, "vminuw", CONST, uminv4si3)
BU_ALTIVEC_2 (VMINSW, "vminsw", CONST, sminv4si3)
BU_ALTIVEC_2 (VMINFP, "vminfp", CONST, sminv4sf3)
-BU_ALTIVEC_2 (VMULEUB, "vmuleub", CONST, altivec_vmuleub)
-BU_ALTIVEC_2 (VMULEUB_UNS, "vmuleub_uns", CONST, altivec_vmuleub)
-BU_ALTIVEC_2 (VMULESB, "vmulesb", CONST, altivec_vmulesb)
-BU_ALTIVEC_2 (VMULEUH, "vmuleuh", CONST, altivec_vmuleuh)
-BU_ALTIVEC_2 (VMULEUH_UNS, "vmuleuh_uns", CONST, altivec_vmuleuh)
-BU_ALTIVEC_2 (VMULESH, "vmulesh", CONST, altivec_vmulesh)
-BU_ALTIVEC_2 (VMULOUB, "vmuloub", CONST, altivec_vmuloub)
-BU_ALTIVEC_2 (VMULOUB_UNS, "vmuloub_uns", CONST, altivec_vmuloub)
-BU_ALTIVEC_2 (VMULOSB, "vmulosb", CONST, altivec_vmulosb)
-BU_ALTIVEC_2 (VMULOUH, "vmulouh", CONST, altivec_vmulouh)
-BU_ALTIVEC_2 (VMULOUH_UNS, "vmulouh_uns", CONST, altivec_vmulouh)
-BU_ALTIVEC_2 (VMULOSH, "vmulosh", CONST, altivec_vmulosh)
+BU_ALTIVEC_2 (VMULEUB, "vmuleub", CONST, vec_widen_umult_even_v16qi)
+BU_ALTIVEC_2 (VMULEUB_UNS, "vmuleub_uns", CONST, vec_widen_umult_even_v16qi)
+BU_ALTIVEC_2 (VMULESB, "vmulesb", CONST, vec_widen_smult_even_v16qi)
+BU_ALTIVEC_2 (VMULEUH, "vmuleuh", CONST, vec_widen_umult_even_v8hi)
+BU_ALTIVEC_2 (VMULEUH_UNS, "vmuleuh_uns", CONST, vec_widen_umult_even_v8hi)
+BU_ALTIVEC_2 (VMULESH, "vmulesh", CONST, vec_widen_smult_even_v8hi)
+BU_ALTIVEC_2 (VMULOUB, "vmuloub", CONST, vec_widen_umult_odd_v16qi)
+BU_ALTIVEC_2 (VMULOUB_UNS, "vmuloub_uns", CONST, vec_widen_umult_odd_v16qi)
+BU_ALTIVEC_2 (VMULOSB, "vmulosb", CONST, vec_widen_smult_odd_v16qi)
+BU_ALTIVEC_2 (VMULOUH, "vmulouh", CONST, vec_widen_umult_odd_v8hi)
+BU_ALTIVEC_2 (VMULOUH_UNS, "vmulouh_uns", CONST, vec_widen_umult_odd_v8hi)
+BU_ALTIVEC_2 (VMULOSH, "vmulosh", CONST, vec_widen_smult_odd_v8hi)
BU_ALTIVEC_2 (VNOR, "vnor", CONST, norv4si3)
BU_ALTIVEC_2 (VOR, "vor", CONST, iorv4si3)
BU_ALTIVEC_2 (VPKUHUM, "vpkuhum", CONST, altivec_vpkuhum)
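
The builtin table above keeps the vec_mule/vec_mulo intrinsics wired to the same instructions under the new generator names. A small usage sketch, assuming big-endian element numbering (values are arbitrary):

    #include <altivec.h>

    /* Arbitrary inputs; vec_mule maps onto vmulesh here.  */
    vector signed int
    vmulesh_demo (void)
    {
      vector signed short a = {  1,  2,  3,  4,  5,  6,  7,  8 };
      vector signed short b = { 10, 20, 30, 40, 50, 60, 70, 80 };
      /* Even elements 0,2,4,6 are multiplied: { 10, 90, 250, 490 }.  */
      return vec_mule (a, b);
    }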