@@ -21,6 +21,8 @@
%rs1_3 7:3 !function=ex_rvc_register
%rs2_3 2:3 !function=ex_rvc_register
%rs2_5 2:5
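+# Zcmp sreg' registers: 0-1 map to s0-s1 (x8-x9), 2-7 map to s2-s7 (x18-x23)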
+%r1s 7:3 !function=ex_sreg_register
+%r2s 2:3 !function=ex_sreg_register
# Immediates:
%imm_ci 12:s1 2:5
@@ -45,6 +47,8 @@
%uimm_cl_b 5:1 6:1
%uimm_cl_h 5:1 !function=ex_shift_1
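+# Zcmp push/pop fields: spimm is the extra stack adjustment scaled by 16,
+# urlist encodes the saved register list {ra, s0-s11}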
+%spimm 2:2 !function=ex_shift_4
+%urlist 4:4
# Argument sets imported from insn32.decode:
&empty !extern
@@ -56,7 +60,9 @@
&u imm rd !extern
&shift shamt rs1 rd !extern
&r2 rd rs1 !extern
+&r2_s rs1 rs2 !extern
+&cmpp urlist spimm
# Formats 16:
@cr .... ..... ..... .. &r rs2=%rs2_5 rs1=%rd %rd
@@ -97,6 +103,8 @@
@cl_h ... . .. ... .. ... .. &i imm=%uimm_cl_h rs1=%rs1_3 rd=%rs2_3
@cs_b ... . .. ... .. ... .. &s imm=%uimm_cl_b rs1=%rs1_3 rs2=%rs2_3
@cs_h ... . .. ... .. ... .. &s imm=%uimm_cl_h rs1=%rs1_3 rs2=%rs2_3
+@cm_pp ... ... ........ .. &cmpp %urlist %spimm
+@cm_mv ... ... ... .. ... .. &r2_s rs2=%r2s rs1=%r1s
# *** RV32/64C Standard Extension (Quadrant 0) ***
{
@@ -176,6 +184,16 @@ slli 000 . ..... ..... 10 @c_shift2
{
sq 101 ... ... .. ... 10 @c_sqsp
c_fsd 101 ...... ..... 10 @c_sdsp
+
+ # *** RV64 and RV32 Zcmp Extension ***
+ [
+ cm_push 101 11000 .... .. 10 @cm_pp
+ cm_pop 101 11010 .... .. 10 @cm_pp
+ cm_popret 101 11110 .... .. 10 @cm_pp
+ cm_popretz 101 11100 .... .. 10 @cm_pp
+ cm_mva01s 101 011 ... 11 ... 10 @cm_mv
+ cm_mvsa01 101 011 ... 01 ... 10 @cm_mv
+ ]
}
sw 110 . ..... ..... 10 @c_swsp
@@ -757,6 +757,11 @@ static int ex_rvc_register(DisasContext *ctx, int reg)
return 8 + reg;
}
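+/* Map a 3-bit Zcmp sreg' number onto s0-s1 (x8-x9) or s2-s7 (x18-x23). */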
+static int ex_sreg_register(DisasContext *ctx, int reg)
+{
+ return reg < 2 ? reg + 8 : reg + 16;
+}
+
static int ex_rvc_shiftli(DisasContext *ctx, int imm)
{
/* For RV128 a shamt of 0 means a shift by 64. */
@@ -1,5 +1,5 @@
/*
- * RISC-V translation routines for the Zcb Standard Extension.
+ * RISC-V translation routines for the Zc[b,mp] Standard Extensions.
*
* Copyright (c) 2021-2022 PLCT Lab
*
@@ -21,6 +21,11 @@
return false; \
} while (0)
+#define REQUIRE_ZCMP(ctx) do { \
+ if (!ctx->cfg_ptr->ext_zcmp) \
+ return false; \
+} while (0)
+
static bool trans_c_zext_b(DisasContext *ctx, arg_c_zext_b *a)
{
REQUIRE_ZCB(ctx);
@@ -98,3 +103,183 @@ static bool trans_c_sh(DisasContext *ctx, arg_c_sh *a)
REQUIRE_ZCB(ctx);
return gen_store(ctx, a, MO_UW);
}
+
+#define X_S0 8
+#define X_S1 9
+#define X_Sn 16
+
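+/*
+ * Turn the 4-bit rlist field into a bitmap of the architectural registers
+ * to save or restore: 4 selects {ra}, 5 {ra, s0}, ... up to 15 {ra, s0-s11}.
+ * Reserved values (below 4) yield an empty bitmap.
+ */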
+static uint32_t decode_push_pop_list(DisasContext *ctx, target_ulong rlist)
+{
+ uint32_t reg_bitmap = 0;
+
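+ /* rlist values above 6 would use s2-s11, which do not exist in RVE. */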
+ if (ctx->cfg_ptr->ext_e && rlist > 6) {
+ return 0;
+ }
+
+ switch (rlist) {
+ case 15:
+ reg_bitmap |= 1 << (X_Sn + 11);
+ reg_bitmap |= 1 << (X_Sn + 10);
+ /* FALL THROUGH */
+ case 14:
+ reg_bitmap |= 1 << (X_Sn + 9);
+ /* FALL THROUGH */
+ case 13:
+ reg_bitmap |= 1 << (X_Sn + 8);
+ /* FALL THROUGH */
+ case 12:
+ reg_bitmap |= 1 << (X_Sn + 7);
+ /* FALL THROUGH */
+ case 11:
+ reg_bitmap |= 1 << (X_Sn + 6);
+ /* FALL THROUGH */
+ case 10:
+ reg_bitmap |= 1 << (X_Sn + 5);
+ /* FALL THROUGH */
+ case 9:
+ reg_bitmap |= 1 << (X_Sn + 4);
+ /* FALL THROUGH */
+ case 8:
+ reg_bitmap |= 1 << (X_Sn + 3);
+ /* FALL THROUGH */
+ case 7:
+ reg_bitmap |= 1 << (X_Sn + 2);
+ /* FALL THROUGH */
+ case 6:
+ reg_bitmap |= 1 << X_S1;
+ /* FALL THROUGH */
+ case 5:
+ reg_bitmap |= 1 << X_S0;
+ /* FALL THROUGH */
+ case 4:
+ reg_bitmap |= 1 << xRA;
+ break;
+ default:
+ break;
+ }
+
+ return reg_bitmap;
+}
+
+static bool gen_pop(DisasContext *ctx, arg_cmpp *a, bool ret, bool ret_val)
+{
+ REQUIRE_ZCMP(ctx);
+
+ uint32_t reg_bitmap = decode_push_pop_list(ctx, a->urlist);
+ if (reg_bitmap == 0) {
+ return false;
+ }
+
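+ /* Frame size: the register slots rounded up to 16 bytes, plus the extra spimm adjustment. */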
+ MemOp memop = get_ol(ctx) == MXL_RV32 ? MO_TEUL : MO_TEUQ;
+ int reg_size = memop_size(memop);
+ target_ulong stack_adj = ROUND_UP(ctpop32(reg_bitmap) * reg_size, 16) +
+ a->spimm;
+ TCGv sp = dest_gpr(ctx, xSP);
+ TCGv addr = tcg_temp_new();
+ int i;
+
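+ /* Reload the saved registers from the top of the save area downwards. */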
+ tcg_gen_addi_tl(addr, sp, stack_adj - reg_size);
+
+ for (i = X_Sn + 11; i >= 0; i--) {
+ if (reg_bitmap & (1 << i)) {
+ TCGv dest = dest_gpr(ctx, i);
+ tcg_gen_qemu_ld_tl(dest, addr, ctx->mem_idx, memop);
+ gen_set_gpr(ctx, i, dest);
+ tcg_gen_subi_tl(addr, addr, reg_size);
+ }
+ }
+
+ tcg_gen_addi_tl(sp, sp, stack_adj);
+ gen_set_gpr(ctx, xSP, sp);
+
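+ /* cm.popretz zeroes a0; cm.popret and cm.popretz then return through ra. */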
+ if (ret_val) {
+ gen_set_gpr(ctx, xA0, ctx->zero);
+ }
+
+ if (ret) {
+ TCGv ret_addr = get_gpr(ctx, xRA, EXT_NONE);
+ gen_set_pc(ctx, ret_addr);
+ tcg_gen_lookup_and_goto_ptr();
+ ctx->base.is_jmp = DISAS_NORETURN;
+ }
+
+ return true;
+}
+
+static bool trans_cm_push(DisasContext *ctx, arg_cm_push *a)
+{
+ REQUIRE_ZCMP(ctx);
+
+ uint32_t reg_bitmap = decode_push_pop_list(ctx, a->urlist);
+ if (reg_bitmap == 0) {
+ return false;
+ }
+
+ MemOp memop = get_ol(ctx) == MXL_RV32 ? MO_TEUL : MO_TEUQ;
+ int reg_size = memop_size(memop);
+ target_ulong stack_adj = ROUND_UP(ctpop32(reg_bitmap) * reg_size, 16) +
+ a->spimm;
+ TCGv sp = dest_gpr(ctx, xSP);
+ TCGv addr = tcg_temp_new();
+ int i;
+
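+ /* Store the selected registers below the incoming sp, highest-numbered first. */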
+ tcg_gen_subi_tl(addr, sp, reg_size);
+
+ for (i = X_Sn + 11; i >= 0; i--) {
+ if (reg_bitmap & (1 << i)) {
+ TCGv val = get_gpr(ctx, i, EXT_NONE);
+ tcg_gen_qemu_st_tl(val, addr, ctx->mem_idx, memop);
+ tcg_gen_subi_tl(addr, addr, reg_size);
+ }
+ }
+
+ tcg_gen_subi_tl(sp, sp, stack_adj);
+ gen_set_gpr(ctx, xSP, sp);
+
+ return true;
+}
+
+static bool trans_cm_pop(DisasContext *ctx, arg_cm_pop *a)
+{
+ return gen_pop(ctx, a, false, false);
+}
+
+static bool trans_cm_popret(DisasContext *ctx, arg_cm_popret *a)
+{
+ return gen_pop(ctx, a, true, false);
+}
+
+static bool trans_cm_popretz(DisasContext *ctx, arg_cm_popretz *a)
+{
+ return gen_pop(ctx, a, true, true);
+}
+
+static bool trans_cm_mva01s(DisasContext *ctx, arg_cm_mva01s *a)
+{
+ REQUIRE_ZCMP(ctx);
+
+ TCGv src1 = get_gpr(ctx, a->rs1, EXT_NONE);
+ TCGv src2 = get_gpr(ctx, a->rs2, EXT_NONE);
+
+ gen_set_gpr(ctx, xA0, src1);
+ gen_set_gpr(ctx, xA1, src2);
+
+ return true;
+}
+
+static bool trans_cm_mvsa01(DisasContext *ctx, arg_cm_mvsa01 *a)
+{
+ REQUIRE_ZCMP(ctx);
+
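+ /* cm.mvsa01 with r1s' equal to r2s' is a reserved encoding. */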
+ if (a->rs1 == a->rs2) {
+ return false;
+ }
+
+ TCGv a0 = get_gpr(ctx, xA0, EXT_NONE);
+ TCGv a1 = get_gpr(ctx, xA1, EXT_NONE);
+
+ gen_set_gpr(ctx, a->rs1, a0);
+ gen_set_gpr(ctx, a->rs2, a1);
+
+ return true;
+}