@@ -529,6 +529,8 @@ VSTRIBR 000100 ..... 00001 ..... . 0000001101 @VX_tb_rc
VSTRIHL 000100 ..... 00010 ..... . 0000001101 @VX_tb_rc
VSTRIHR 000100 ..... 00011 ..... . 0000001101 @VX_tb_rc
+VCLRLB 000100 ..... ..... ..... 00110001101 @VX
+
# VSX Load/Store Instructions
LXV 111101 ..... ..... ............ . 001 @DQ_TSX
@@ -1940,6 +1940,63 @@ TRANS(VSTRIBR, do_vstri, gen_helper_VSTRIBR)
TRANS(VSTRIHL, do_vstri, gen_helper_VSTRIHL)
TRANS(VSTRIHR, do_vstri, gen_helper_VSTRIHR)
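+/*
+ * vclrlb VRT,VRA,RB: Vector Clear Leftmost Bytes (ISA v3.1). Copy VRA into
+ * VRT, clearing all but the rightmost GPR[RB] bytes (at most 16).
+ */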
+static bool trans_VCLRLB(DisasContext *ctx, arg_VX *a)
+{
+    REQUIRE_INSNS_FLAGS2(ctx, ISA310);
+    REQUIRE_VECTOR(ctx);
+
+    TCGv_i64 rb, mh, ml, tmp,
+             ones = tcg_constant_i64(-1),
+             zero = tcg_constant_i64(0);
+
+    rb = tcg_temp_new_i64();
+    mh = tcg_temp_new_i64();
+    ml = tcg_temp_new_i64();
+    tmp = tcg_temp_new_i64();
+
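+    /* RB is a GPR; it gives the number of rightmost bytes of VRA to keep */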
+    tcg_gen_extu_tl_i64(rb, cpu_gpr[a->vrb]);
+
+    /* tmp = mask covering only the low (rb % 8) bytes of a doubleword */
+    tcg_gen_andi_i64(tmp, rb, 7);
+    tcg_gen_shli_i64(tmp, tmp, 3);
+    tcg_gen_shl_i64(tmp, ones, tmp);
+    tcg_gen_not_i64(tmp, tmp);
+
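+    /*
+     * Choose the AND mask for each doubleword of VRA:
+     *   rb < 8       : ml = tmp, mh = 0
+     *   8 <= rb < 16 : ml = ~0,  mh = tmp
+     *   rb >= 16     : ml = ~0,  mh = ~0  (VRA is copied unchanged)
+     */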
+    tcg_gen_movcond_i64(TCG_COND_LTU, ml, rb, tcg_constant_i64(8),
+                        tmp, ones);
+    tcg_gen_movcond_i64(TCG_COND_LTU, mh, rb, tcg_constant_i64(8),
+                        zero, tmp);
+    tcg_gen_movcond_i64(TCG_COND_LTU, mh, rb, tcg_constant_i64(16),
+                        mh, ones);
+
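+    /* VRT = VRA with the leftmost bytes cleared by the masks */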
+    get_avr64(tmp, a->vra, true);
+    tcg_gen_and_i64(tmp, tmp, mh);
+    set_avr64(a->vrt, tmp, true);
+
+    get_avr64(tmp, a->vra, false);
+    tcg_gen_and_i64(tmp, tmp, ml);
+    set_avr64(a->vrt, tmp, false);
+
+    tcg_temp_free_i64(rb);
+    tcg_temp_free_i64(mh);
+    tcg_temp_free_i64(ml);
+    tcg_temp_free_i64(tmp);
+
+    return true;
+}
+
#define GEN_VAFORM_PAIRED(name0, name1, opc2) \
static void glue(gen_, name0##_##name1)(DisasContext *ctx) \
{ \