| field | value | date |
|---|---|---|
| author | Richard Henderson <richard.henderson@linaro.org> | 2023-04-23 11:20:36 +0100 |
| committer | Richard Henderson <richard.henderson@linaro.org> | 2023-04-23 11:20:37 +0100 |
| commit | 327ec8d6c2a2223b78d311153a471036e474c5c5 | |
| tree | 7c96b5cba009ec1ff6efd18d3977e86c7f0b17e6 /tcg/mips/tcg-target.c.inc | |
| parent | 6dd06214892d71cbbdd25daed7693e58afcb1093 | |
| parent | 3ea9be33400f14305565a9a094cb6031c07183d5 | |
Merge tag 'pull-tcg-20230423' of https://gitlab.com/rth7680/qemu into staging
```
tcg cleanups:

  - Remove tcg_abort()
  - Split out extensions as known backend interfaces
  - Put the separate extensions together as tcg_out_movext
  - Introduce tcg_out_xchg as a backend interface
  - Clear TCGLabelQemuLdst on allocation
  - Avoid redundant extensions for riscv

# -----BEGIN PGP SIGNATURE-----
#
# iQFRBAABCgA7FiEEekgeeIaLTbaoWgXAZN846K9+IV8FAmRE69sdHHJpY2hhcmQu
# aGVuZGVyc29uQGxpbmFyby5vcmcACgkQZN846K9+IV/6jQf6Al9cgeJ6guVMpoRS
# +sXaTs5U2yaqRvz5gGn2ANFuFgD2QanbWHjS5guTnhbsvq3icyOCpIXIPg/Z04LB
# fTgAUCF5ut8U8C12HyGq/p4BFoTTWnCGPwY+PB9pMb5LiEcmaSUUz+fSA8xMX1b6
# EylI8YNd74A9j5PBNbGIXooj8llM71p9YztwQ9V7sPH3ZON4qbPRDgrJsb5TngMa
# daTpGoW+A9UyG7z0Ie6UuiOyYAzeQqm64WmMlc7UYeb9lL+yxvCq4+MXH2V/SKqg
# GLOF95DCdqj1EeZCOt0aN1ybZPcYFFkmpXrD1iLu0Mhy7Qo/vghX/eFoFnLleD+Y
# yM+LTg==
# =d2hZ
# -----END PGP SIGNATURE-----
# gpg: Signature made Sun 23 Apr 2023 09:27:07 AM BST
# gpg:                using RSA key 7A481E78868B4DB6A85A05C064DF38E8AF7E215F
# gpg:                issuer "richard.henderson@linaro.org"
# gpg: Good signature from "Richard Henderson <richard.henderson@linaro.org>" [ultimate]

* tag 'pull-tcg-20230423' of https://gitlab.com/rth7680/qemu:
  tcg/riscv: Conditionalize tcg_out_exts_i32_i64
  tcg: Clear TCGLabelQemuLdst on allocation
  tcg: Introduce tcg_out_xchg
  tcg: Introduce tcg_out_movext
  tcg: Split out tcg_out_extrl_i64_i32
  tcg: Split out tcg_out_extu_i32_i64
  tcg: Split out tcg_out_exts_i32_i64
  tcg: Split out tcg_out_ext32u
  tcg: Split out tcg_out_ext32s
  tcg: Split out tcg_out_ext16u
  tcg: Split out tcg_out_ext16s
  tcg: Split out tcg_out_ext8u
  tcg: Split out tcg_out_ext8s
  tcg: Replace tcg_abort with g_assert_not_reached
  tcg: Replace if + tcg_abort with tcg_debug_assert

Signed-off-by: Richard Henderson <richard.henderson@linaro.org>
```
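The cleanup list names `tcg_out_movext` without showing it. As a hypothetical sketch only (not the code added by this pull request), a backend-neutral "move with extension" helper built on the newly split-out hooks could dispatch on the requested source extension roughly like this; the hook signatures follow the MIPS definitions in the diff below, and `movext_sketch` is an invented name:

```c
/*
 * Hypothetical sketch: one way a generic "move with extension" helper
 * could be layered on top of the per-backend hooks this series
 * introduces (tcg_out_ext8s/ext8u/ext16s/ext16u/ext32s/ext32u).
 * The real tcg_out_movext in tcg/tcg.c may differ in detail.
 */
static void movext_sketch(TCGContext *s, TCGType dst_type, TCGReg dst,
                          MemOp src_ext, TCGReg src)
{
    switch (src_ext) {
    case MO_SB:
        tcg_out_ext8s(s, dst_type, dst, src);   /* sign-extend from 8 bits */
        break;
    case MO_UB:
        tcg_out_ext8u(s, dst, src);             /* zero-extend from 8 bits */
        break;
    case MO_SW:
        tcg_out_ext16s(s, dst_type, dst, src);  /* sign-extend from 16 bits */
        break;
    case MO_UW:
        tcg_out_ext16u(s, dst, src);            /* zero-extend from 16 bits */
        break;
    case MO_SL:
        tcg_out_ext32s(s, dst, src);            /* sign-extend from 32 bits */
        break;
    case MO_UL:
        tcg_out_ext32u(s, dst, src);            /* zero-extend from 32 bits */
        break;
    default:
        g_assert_not_reached();
    }
}
```

Centralizing this dispatch in common code is what allows each backend to delete its open-coded ext* cases, which is exactly the shape of the MIPS change below.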
Diffstat (limited to 'tcg/mips/tcg-target.c.inc')
| mode | path | lines changed |
|---|---|---|
| -rw-r--r-- | tcg/mips/tcg-target.c.inc | 94 |

1 file changed, 69 insertions, 25 deletions
```diff
diff --git a/tcg/mips/tcg-target.c.inc b/tcg/mips/tcg-target.c.inc
index 80748d892e..346c614354 100644
--- a/tcg/mips/tcg-target.c.inc
+++ b/tcg/mips/tcg-target.c.inc
@@ -552,6 +552,54 @@ static void tcg_out_movi(TCGContext *s, TCGType type,
     }
 }
 
+static void tcg_out_ext8s(TCGContext *s, TCGType type, TCGReg rd, TCGReg rs)
+{
+    tcg_debug_assert(TCG_TARGET_HAS_ext8s_i32);
+    tcg_out_opc_reg(s, OPC_SEB, rd, TCG_REG_ZERO, rs);
+}
+
+static void tcg_out_ext8u(TCGContext *s, TCGReg rd, TCGReg rs)
+{
+    tcg_out_opc_imm(s, OPC_ANDI, rd, rs, 0xff);
+}
+
+static void tcg_out_ext16s(TCGContext *s, TCGType type, TCGReg rd, TCGReg rs)
+{
+    tcg_debug_assert(TCG_TARGET_HAS_ext16s_i32);
+    tcg_out_opc_reg(s, OPC_SEH, rd, TCG_REG_ZERO, rs);
+}
+
+static void tcg_out_ext16u(TCGContext *s, TCGReg rd, TCGReg rs)
+{
+    tcg_out_opc_imm(s, OPC_ANDI, rd, rs, 0xffff);
+}
+
+static void tcg_out_ext32s(TCGContext *s, TCGReg rd, TCGReg rs)
+{
+    tcg_debug_assert(TCG_TARGET_REG_BITS == 64);
+    tcg_out_opc_sa(s, OPC_SLL, rd, rs, 0);
+}
+
+static void tcg_out_exts_i32_i64(TCGContext *s, TCGReg rd, TCGReg rs)
+{
+    tcg_out_ext32s(s, rd, rs);
+}
+
+static void tcg_out_extu_i32_i64(TCGContext *s, TCGReg rd, TCGReg rs)
+{
+    tcg_out_ext32u(s, rd, rs);
+}
+
+static void tcg_out_extrl_i64_i32(TCGContext *s, TCGReg rd, TCGReg rs)
+{
+    tcg_out_ext32s(s, rd, rs);
+}
+
+static bool tcg_out_xchg(TCGContext *s, TCGType type, TCGReg r1, TCGReg r2)
+{
+    return false;
+}
+
 static void tcg_out_addi_ptr(TCGContext *s, TCGReg rd, TCGReg rs,
                              tcg_target_long imm)
 {
@@ -635,6 +683,7 @@ static void tcg_out_bswap64(TCGContext *s, TCGReg ret, TCGReg arg)
 
 static void tcg_out_ext32u(TCGContext *s, TCGReg ret, TCGReg arg)
 {
+    tcg_debug_assert(TCG_TARGET_REG_BITS == 64);
     if (use_mips32r2_instructions) {
         tcg_out_opc_bf(s, OPC_DEXT, ret, arg, 31, 0);
     } else {
@@ -798,7 +847,7 @@ static void tcg_out_setcond(TCGContext *s, TCGCond cond, TCGReg ret,
         break;
 
     default:
-        tcg_abort();
+        g_assert_not_reached();
         break;
     }
 }
@@ -855,7 +904,7 @@ static void tcg_out_brcond(TCGContext *s, TCGCond cond, TCGReg arg1,
         break;
 
     default:
-        tcg_abort();
+        g_assert_not_reached();
         break;
     }
 
@@ -1093,7 +1142,7 @@ static int tcg_out_call_iarg_reg8(TCGContext *s, int i, TCGReg arg)
     if (i < ARRAY_SIZE(tcg_target_call_iarg_regs)) {
         tmp = tcg_target_call_iarg_regs[i];
     }
-    tcg_out_opc_imm(s, OPC_ANDI, tmp, arg, 0xff);
+    tcg_out_ext8u(s, tmp, arg);
     return tcg_out_call_iarg_reg(s, i, tmp);
 }
 
@@ -1291,7 +1340,7 @@ static bool tcg_out_qemu_ld_slow_path(TCGContext *s, TCGLabelQemuLdst *l)
     /* delay slot */
     if (TCG_TARGET_REG_BITS == 64 && l->type == TCG_TYPE_I32) {
         /* we always sign-extend 32-bit loads */
-        tcg_out_opc_sa(s, OPC_SLL, v0, TCG_REG_V0, 0);
+        tcg_out_ext32s(s, v0, TCG_REG_V0);
     } else {
         tcg_out_opc_reg(s, OPC_OR, v0, TCG_REG_V0, TCG_REG_ZERO);
     }
@@ -1337,7 +1386,7 @@ static bool tcg_out_qemu_st_slow_path(TCGContext *s, TCGLabelQemuLdst *l)
         }
         break;
     default:
-        tcg_abort();
+        g_assert_not_reached();
     }
 
     i = tcg_out_call_iarg_imm(s, i, oi);
@@ -1527,7 +1576,7 @@ static void tcg_out_qemu_ld_direct(TCGContext *s, TCGReg lo, TCGReg hi,
         }
         break;
     default:
-        tcg_abort();
+        g_assert_not_reached();
     }
 }
 
@@ -1775,7 +1824,7 @@ static void tcg_out_qemu_st_direct(TCGContext *s, TCGReg lo, TCGReg hi,
         break;
 
     default:
-        tcg_abort();
+        g_assert_not_reached();
     }
 }
 
@@ -1848,7 +1897,7 @@ static void tcg_out_qemu_st_unalign(TCGContext *s, TCGReg lo, TCGReg hi,
         break;
 
     default:
-        tcg_abort();
+        g_assert_not_reached();
     }
 }
 static void tcg_out_qemu_st(TCGContext *s, const TCGArg *args, bool is_64)
@@ -2245,13 +2294,6 @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
     case INDEX_op_not_i64:
         i1 = OPC_NOR;
         goto do_unary;
-    case INDEX_op_ext8s_i32:
-    case INDEX_op_ext8s_i64:
-        i1 = OPC_SEB;
-        goto do_unary;
-    case INDEX_op_ext16s_i32:
-    case INDEX_op_ext16s_i64:
-        i1 = OPC_SEH;
     do_unary:
         tcg_out_opc_reg(s, i1, a0, TCG_REG_ZERO, a1);
         break;
@@ -2272,15 +2314,6 @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
     case INDEX_op_extrh_i64_i32:
         tcg_out_dsra(s, a0, a1, 32);
         break;
-    case INDEX_op_ext32s_i64:
-    case INDEX_op_ext_i32_i64:
-    case INDEX_op_extrl_i64_i32:
-        tcg_out_opc_sa(s, OPC_SLL, a0, a1, 0);
-        break;
-    case INDEX_op_ext32u_i64:
-    case INDEX_op_extu_i32_i64:
-        tcg_out_ext32u(s, a0, a1);
-        break;
 
     case INDEX_op_sar_i32:
         i1 = OPC_SRAV, i2 = OPC_SRA;
@@ -2419,8 +2452,19 @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
     case INDEX_op_call:     /* Always emitted via tcg_out_call.  */
     case INDEX_op_exit_tb:  /* Always emitted via tcg_out_exit_tb.  */
     case INDEX_op_goto_tb:  /* Always emitted via tcg_out_goto_tb.  */
+    case INDEX_op_ext8s_i32:  /* Always emitted via tcg_reg_alloc_op.  */
+    case INDEX_op_ext8s_i64:
+    case INDEX_op_ext8u_i32:
+    case INDEX_op_ext8u_i64:
+    case INDEX_op_ext16s_i32:
+    case INDEX_op_ext16s_i64:
+    case INDEX_op_ext32s_i64:
+    case INDEX_op_ext32u_i64:
+    case INDEX_op_ext_i32_i64:
+    case INDEX_op_extu_i32_i64:
+    case INDEX_op_extrl_i64_i32:
     default:
-        tcg_abort();
+        g_assert_not_reached();
     }
 }
```
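The new `/* Always emitted via tcg_reg_alloc_op. */` cases explain why the extension opcodes now fall through to `g_assert_not_reached()`: the common code is expected to route them to the dedicated hooks before the backend's `tcg_out_op()` is ever reached. The following is an illustrative sketch of that routing, using only the hook names defined in the diff above; `emit_extension_sketch` is an invented name and the actual dispatch in tcg/tcg.c may be structured differently:

```c
/*
 * Illustrative sketch, not the upstream tcg/tcg.c code: the register
 * allocator is assumed to intercept the extension opcodes and call the
 * per-backend hooks directly, so tcg_out_op() never sees them.
 */
static void emit_extension_sketch(TCGContext *s, TCGOpcode opc,
                                  TCGReg dst, TCGReg src)
{
    switch (opc) {
    case INDEX_op_ext8s_i32:
        tcg_out_ext8s(s, TCG_TYPE_I32, dst, src);
        break;
    case INDEX_op_ext8s_i64:
        tcg_out_ext8s(s, TCG_TYPE_I64, dst, src);
        break;
    case INDEX_op_ext32s_i64:
        tcg_out_ext32s(s, dst, src);
        break;
    case INDEX_op_ext32u_i64:
        tcg_out_ext32u(s, dst, src);
        break;
    case INDEX_op_ext_i32_i64:
        tcg_out_exts_i32_i64(s, dst, src);
        break;
    case INDEX_op_extu_i32_i64:
        tcg_out_extu_i32_i64(s, dst, src);
        break;
    case INDEX_op_extrl_i64_i32:
        tcg_out_extrl_i64_i32(s, dst, src);
        break;
    default:
        /* Every other opcode still reaches the backend via tcg_out_op(). */
        break;
    }
}
```

With the generic layer owning this dispatch, a backend only implements the small hook set once and its `tcg_out_op()` switch shrinks, which is the net effect of the MIPS change above.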