@@ -540,9 +540,11 @@ static void tcg_out_br(TCGContext *s, int opc, int ra, int label_index)
tcg_out_fmt_br(s, opc, ra, value);
}
-static void tcg_out_brcond(TCGContext *s, int cond, TCGArg arg1,
+static void tcg_out_brcond(TCGContext *s, TCGCond cond, TCGArg arg1,
TCGArg arg2, int const_arg2, int label_index)
{
+ /* Note that unsigned comparisons are not handled here: their entries
+ remain zero, so they always fall through to the compare path below. */
static const int br_opc[10] = {
[TCG_COND_EQ] = INSN_BEQ,
[TCG_COND_NE] = INSN_BNE,
@@ -552,38 +554,56 @@ static void tcg_out_brcond(TCGContext *s, int cond, TCGArg arg1,
[TCG_COND_GT] = INSN_BGT
};
- static const uint64_t cmp_opc[10] = {
+ /* The low bit of these entries indicates that the result of the
+ comparison must be inverted. The bit must be masked off before
+ the rest of the instruction is output. */
+ static const int cmp_opc[10] = {
[TCG_COND_EQ] = INSN_CMPEQ,
- [TCG_COND_NE] = INSN_CMPEQ,
+ [TCG_COND_NE] = INSN_CMPEQ | 1,
[TCG_COND_LT] = INSN_CMPLT,
- [TCG_COND_GE] = INSN_CMPLT,
+ [TCG_COND_GE] = INSN_CMPLT | 1,
[TCG_COND_LE] = INSN_CMPLE,
- [TCG_COND_GT] = INSN_CMPLE,
+ [TCG_COND_GT] = INSN_CMPLE | 1,
[TCG_COND_LTU] = INSN_CMPULT,
- [TCG_COND_GEU] = INSN_CMPULT,
+ [TCG_COND_GEU] = INSN_CMPULT | 1,
[TCG_COND_LEU] = INSN_CMPULE,
- [TCG_COND_GTU] = INSN_CMPULE
+ [TCG_COND_GTU] = INSN_CMPULE | 1
};
int opc = 0;
- if (const_arg2) {
- if (arg2 == 0) {
- opc = br_opc[cond];
- } else if (cond == TCG_COND_EQ || cond == TCG_COND_NE) {
- tcg_out_mem_long(s, INSN_LDA, TMP_REG1, arg1, -arg2);
- opc = (cond == TCG_COND_EQ ? INSN_BEQ : INSN_BNE);
- }
+ /* Possible improvements:
+ (1) Notice arg1 == $31 and !const_arg2. In this case, swap the
+ two operands and swap the sense of the comparison to allow the
+ use of the direct branches.
+
+ (2) Allow arbitrary constants. We can, on occasion, generate one
+ less instruction if we compute
+ TMP = ARG1 - CONST
+ instead of
+ TMP = ARG1 cmp TMP2
+ where TMP2 is the constant loaded into a register by generic code.
+ Note that for 64-bit operands this works only for EQ and NE. For
+ 32-bit operands, we would need to either limit this to signed
+ comparisons or properly zero-extend unsigned inputs. The payoff
+ here isn't great though; much less than (1). */
+
+ /* Notice signed comparisons vs zero. These are handled by the
+ branch instructions directly. */
+ if (const_arg2 && arg2 == 0) {
+ opc = br_opc[cond];
}
+ /* Otherwise, generate a comparison into a temporary. */
if (opc == 0) {
- opc = cmp_opc[cond];
+ opc = cmp_opc[cond] & ~1;
if (const_arg2) {
tcg_out_fmt_opi(s, opc, arg1, arg2, TMP_REG1);
} else {
tcg_out_fmt_opr(s, opc, arg1, arg2, TMP_REG1);
}
- opc = (cond & 1) ? INSN_BEQ : INSN_BNE;
+
+ opc = (cmp_opc[cond] & 1 ? INSN_BEQ : INSN_BNE);
arg1 = TMP_REG1;
}
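
As a rough illustration of the two improvements described in the comment
above, a minimal sketch might look as follows. This assumes TCG_REG_ZERO
names the Alpha zero register $31 in this backend (a hypothetical name
here), and that the generic tcg_swap_cond() helper returns the condition
with its operands exchanged; it is not part of the patch.

    /* Improvement (1): if ARG1 is $31 (the zero register) and ARG2 is in
       a register, exchange the operands and the sense of the comparison
       so that the direct branch-vs-zero path above applies.  Unsigned
       conditions still fall through to the compare path, now against a
       constant zero.  */
    if (!const_arg2 && arg1 == TCG_REG_ZERO) {
        arg1 = arg2;
        arg2 = 0;
        const_arg2 = 1;
        cond = tcg_swap_cond(cond);
    }

    /* Improvement (2), 64-bit EQ/NE only: fold the constant into an LDA
       and branch on the (non-)zero difference, much as the removed code
       above did, saving the separate CMPEQ.  */
    if (const_arg2 && arg2 != 0
        && (cond == TCG_COND_EQ || cond == TCG_COND_NE)) {
        tcg_out_mem_long(s, INSN_LDA, TMP_REG1, arg1, -arg2);
        opc = (cond == TCG_COND_EQ ? INSN_BEQ : INSN_BNE);
        arg1 = TMP_REG1;
    }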