@@ -92,12 +92,18 @@ static int op_bits(int op)
case INDEX_op_add_i32:
case INDEX_op_sub_i32:
case INDEX_op_mul_i32:
+ case INDEX_op_and_i32:
+ case INDEX_op_or_i32:
+ case INDEX_op_xor_i32:
return 32;
#if TCG_TARGET_REG_BITS == 64
case INDEX_op_mov_i64:
case INDEX_op_add_i64:
case INDEX_op_sub_i64:
case INDEX_op_mul_i64:
+ case INDEX_op_and_i64:
+ case INDEX_op_or_i64:
+ case INDEX_op_xor_i64:
return 64;
#endif
default:
@@ -153,6 +159,24 @@ static TCGArg do_constant_folding_2(int op, TCGArg x, TCGArg y)
#endif
return x * y;
+ case INDEX_op_and_i32:
+#if TCG_TARGET_REG_BITS == 64
+ case INDEX_op_and_i64:
+#endif
+ return x & y;
+
+ case INDEX_op_or_i32:
+#if TCG_TARGET_REG_BITS == 64
+ case INDEX_op_or_i64:
+#endif
+ return x | y;
+
+ case INDEX_op_xor_i32:
+#if TCG_TARGET_REG_BITS == 64
+ case INDEX_op_xor_i64:
+#endif
+ return x ^ y;
+
default:
fprintf(stderr,
"Unrecognized operation %d in do_constant_folding.\n", op);
@@ -272,6 +296,32 @@ static TCGArg *tcg_constant_folding(TCGContext *s, uint16_t *tcg_opc_ptr,
continue;
}
break;
+ case INDEX_op_or_i32:
+ case INDEX_op_and_i32:
+#if TCG_TARGET_REG_BITS == 64
+ case INDEX_op_and_i64:
+ case INDEX_op_or_i64:
+#endif
+ if (args[1] == args[2]) {
+ if (args[1] == args[0]) {
+ args += 3;
+ gen_opc_buf[op_index] = INDEX_op_nop;
+ } else {
+ reset_temp(temps, args[0], nb_temps, nb_globals);
+ if (args[1] >= s->nb_globals) {
+ temps[args[0]].state = TCG_TEMP_COPY;
+ temps[args[0]].val = args[1];
+ assert(temps[args[0]].num_copies == 0);
+ }
+ gen_opc_buf[op_index] = op_to_mov(op);
+ gen_args[0] = args[0];
+ gen_args[1] = args[1];
+ gen_args += 2;
+ args += 3;
+ }
+ continue;
+ }
+ break;
}
/* Propagate constants through copy operations and do constant
@@ -321,10 +371,16 @@ static TCGArg *tcg_constant_folding(TCGContext *s, uint16_t *tcg_opc_ptr,
gen_args += 2;
args += 2;
break;
+ case INDEX_op_or_i32:
+ case INDEX_op_and_i32:
+ case INDEX_op_xor_i32:
case INDEX_op_add_i32:
case INDEX_op_sub_i32:
case INDEX_op_mul_i32:
#if TCG_TARGET_REG_BITS == 64
+ case INDEX_op_and_i64:
+ case INDEX_op_or_i64:
+ case INDEX_op_xor_i64:
case INDEX_op_add_i64:
case INDEX_op_sub_i64:
case INDEX_op_mul_i64:
Perform constant folding for AND, OR, XOR operations.

Signed-off-by: Kirill Batuzov <batuzovk@ispras.ru>
---
 tcg/optimize.c |   56 ++++++++++++++++++++++++++++++++++++++++++++++++++++++++
 1 files changed, 56 insertions(+), 0 deletions(-)