@@ -460,6 +460,24 @@ enum {
R6_OPC_SCD = 0x27 | OPC_SPECIAL3,
};
+/* Loongson EXT LDC2/SDC2 opcodes */
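+/*
+ * These encodings reuse the LDC2/SDC2 major opcodes for a "load/store
+ * with offset" group: the minor opcode lives in the low three bits of
+ * the instruction word, so the mask below keeps only the major opcode
+ * and bits [2:0].
+ */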
+#define MASK_LOONGSON_LSDC2(op) (MASK_OP_MAJOR(op) | (op & 0x7))
+
+enum {
+    OPC_GSLBX   = 0x0 | OPC_LDC2,
+    OPC_GSLHX   = 0x1 | OPC_LDC2,
+    OPC_GSLWX   = 0x2 | OPC_LDC2,
+    OPC_GSLDX   = 0x3 | OPC_LDC2,
+    OPC_GSLWXC1 = 0x6 | OPC_LDC2,
+    OPC_GSLDXC1 = 0x7 | OPC_LDC2,
+    OPC_GSSBX   = 0x0 | OPC_SDC2,
+    OPC_GSSHX   = 0x1 | OPC_SDC2,
+    OPC_GSSWX   = 0x2 | OPC_SDC2,
+    OPC_GSSDX   = 0x3 | OPC_SDC2,
+    OPC_GSSWXC1 = 0x6 | OPC_SDC2,
+    OPC_GSSDXC1 = 0x7 | OPC_SDC2,
+};
+
/* BSHFL opcodes */
#define MASK_BSHFL(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
@@ -5910,6 +5928,165 @@ no_rd:
tcg_temp_free_i64(t1);
}
+/* Loongson EXT LDC2/SDC2 */
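+/*
+ * rt is the GPR/FPR being loaded or stored, rs is the base register and
+ * rd is the index register that is added to the sign-extended 8-bit
+ * offset taken from the instruction word.
+ */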
+static void gen_loongson_lsdc2(DisasContext *ctx, int rt,
+                               int rs, int rd)
+{
+    /* The signed 8-bit offset is encoded in bits [10:3] of the opcode */
+    int offset = sextract32(ctx->opcode, 3, 8);
+    uint32_t opc = MASK_LOONGSON_LSDC2(ctx->opcode);
+    TCGv t0, t1;
+    TCGv_i32 fp0;
+
+    /* Pre-conditions */
+    switch (opc) {
+    case OPC_GSLBX:
+    case OPC_GSLHX:
+    case OPC_GSLWX:
+    case OPC_GSLDX:
+        /* prefetch, implement as NOP */
+        if (rt == 0) {
+            return;
+        }
+        break;
+    case OPC_GSSBX:
+    case OPC_GSSHX:
+    case OPC_GSSWX:
+    case OPC_GSSDX:
+        break;
+    case OPC_GSLWXC1:
+#if defined(TARGET_MIPS64)
+    case OPC_GSLDXC1:
+#endif
+        check_cp1_enabled(ctx);
+        /* prefetch, implement as NOP */
+        if (rt == 0) {
+            return;
+        }
+        break;
+    case OPC_GSSWXC1:
+#if defined(TARGET_MIPS64)
+    case OPC_GSSDXC1:
+#endif
+        check_cp1_enabled(ctx);
+        break;
+    default:
+        MIPS_INVAL("loongson_lsdc2");
+        generate_exception_end(ctx, EXCP_RI);
+        return;
+    }
+
+    t0 = tcg_temp_new();
+
+    gen_base_offset_addr(ctx, t0, rs, offset);
+    if (rd) {
+        /* $zero as index adds nothing; cpu_gpr[0] is not a valid TCGv */
+        gen_op_addr_add(ctx, t0, cpu_gpr[rd], t0);
+    }
+
+    switch (opc) {
+    case OPC_GSLBX:
+        tcg_gen_qemu_ld_tl(t0, t0, ctx->mem_idx, MO_SB);
+        gen_store_gpr(t0, rt);
+        break;
+    case OPC_GSLHX:
+        tcg_gen_qemu_ld_tl(t0, t0, ctx->mem_idx, MO_TESW |
+                           ctx->default_tcg_memop_mask);
+        gen_store_gpr(t0, rt);
+        break;
+    case OPC_GSLWX:
+        tcg_gen_qemu_ld_tl(t0, t0, ctx->mem_idx, MO_TESL |
+                           ctx->default_tcg_memop_mask);
+        gen_store_gpr(t0, rt);
+        break;
+#if defined(TARGET_MIPS64)
+    case OPC_GSLDX:
+        tcg_gen_qemu_ld_tl(t0, t0, ctx->mem_idx, MO_TEQ |
+                           ctx->default_tcg_memop_mask);
+        gen_store_gpr(t0, rt);
+        break;
+#endif
+    case OPC_GSLWXC1:
+        fp0 = tcg_temp_new_i32();
+        tcg_gen_qemu_ld_i32(fp0, t0, ctx->mem_idx, MO_TESL |
+                            ctx->default_tcg_memop_mask);
+        gen_store_fpr32(ctx, fp0, rt);
+        tcg_temp_free_i32(fp0);
+        break;
+#if defined(TARGET_MIPS64)
+    case OPC_GSLDXC1:
+        tcg_gen_qemu_ld_tl(t0, t0, ctx->mem_idx, MO_TEQ |
+                           ctx->default_tcg_memop_mask);
+        gen_store_fpr64(ctx, t0, rt);
+        break;
+#endif
+    case OPC_GSSBX:
+        t1 = tcg_temp_new();
+        gen_load_gpr(t1, rt);
+        tcg_gen_qemu_st_tl(t1, t0, ctx->mem_idx, MO_SB);
+        tcg_temp_free(t1);
+        break;
+    case OPC_GSSHX:
+        t1 = tcg_temp_new();
+        gen_load_gpr(t1, rt);
+        tcg_gen_qemu_st_tl(t1, t0, ctx->mem_idx, MO_TEUW |
+                           ctx->default_tcg_memop_mask);
+        tcg_temp_free(t1);
+        break;
+    case OPC_GSSWX:
+        t1 = tcg_temp_new();
+        gen_load_gpr(t1, rt);
+        tcg_gen_qemu_st_tl(t1, t0, ctx->mem_idx, MO_TEUL |
+                           ctx->default_tcg_memop_mask);
+        tcg_temp_free(t1);
+        break;
+#if defined(TARGET_MIPS64)
+    case OPC_GSSDX:
+        t1 = tcg_temp_new();
+        gen_load_gpr(t1, rt);
+        tcg_gen_qemu_st_tl(t1, t0, ctx->mem_idx, MO_TEQ |
+                           ctx->default_tcg_memop_mask);
+        tcg_temp_free(t1);
+        break;
+#endif
+    case OPC_GSSWXC1:
+        fp0 = tcg_temp_new_i32();
+        gen_load_fpr32(ctx, fp0, rt);
+        tcg_gen_qemu_st_i32(fp0, t0, ctx->mem_idx, MO_TEUL |
+                            ctx->default_tcg_memop_mask);
+        tcg_temp_free_i32(fp0);
+        break;
+#if defined(TARGET_MIPS64)
+    case OPC_GSSDXC1:
+        t1 = tcg_temp_new();
+        gen_load_fpr64(ctx, t1, rt);
+        tcg_gen_qemu_st_i64(t1, t0, ctx->mem_idx, MO_TEQ |
+                            ctx->default_tcg_memop_mask);
+        tcg_temp_free(t1);
+        break;
+#endif
+    default:
+        break;
+    }
+
+    tcg_temp_free(t0);
+}
+
/* Traps */
static void gen_trap(DisasContext *ctx, uint32_t opc,
int rs, int rt, int16_t imm)
@@ -30799,6 +30976,8 @@ static void decode_opc(CPUMIPSState *env, DisasContext *ctx)
/* OPC_JIC, OPC_JIALC */
gen_compute_compact_branch(ctx, op, 0, rt, imm);
}
+    } else if (ctx->insn_flags & ASE_LEXT) {
+        gen_loongson_lsdc2(ctx, rt, rs, rd);
} else {
/* OPC_LWC2, OPC_SWC2 */
/* COP2: Not implemented. */
The LDC2/SDC2 opcodes have been rewritten as a "load & store with offset"
group of instructions by the Loongson EXT ASE.

Signed-off-by: Jiaxun Yang <jiaxun.yang@flygoat.com>
---
 target/mips/translate.c | 179 ++++++++++++++++++++++++++++++++++++++++
 1 file changed, 179 insertions(+)
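
Reviewer note, not part of the patch: below is a minimal host-side sketch of
the addressing these instructions implement, mirroring the field extraction
done in gen_loongson_lsdc2() above. The register numbers and the sample
instruction word are made up for illustration.

#include <inttypes.h>
#include <stdint.h>
#include <stdio.h>

/* EA = base(rs[25:21]) + index(rd[15:11]) + sign-extended offset([10:3]) */
static int64_t lsdc2_effective_addr(uint32_t insn, const int64_t gpr[32])
{
    int rs = (insn >> 21) & 0x1f;        /* base register  */
    int rd = (insn >> 11) & 0x1f;        /* index register */
    int offset = (int8_t)(insn >> 3);    /* signed 8-bit offset */

    return gpr[rs] + gpr[rd] + offset;
}

int main(void)
{
    int64_t gpr[32] = { 0 };
    gpr[4] = 0x1000;    /* base  */
    gpr[5] = 0x20;      /* index */

    /* Hypothetical GSLWX word: LDC2 major opcode (0x36), rs = 4, rt = 2,
     * rd = 5, offset = -8, minor opcode 0x2 (GSLWX). */
    uint32_t insn = (0x36u << 26) | (4u << 21) | (2u << 16) | (5u << 11)
                    | (((uint32_t)-8 & 0xff) << 3) | 0x2;

    /* Prints "EA = 0x1018" (0x1000 + 0x20 - 8). */
    printf("EA = 0x%" PRIx64 "\n", (uint64_t)lsdc2_effective_addr(insn, gpr));
    return 0;
}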