Diffstat (limited to 'tcg/tcg-op-ldst.c')
| -rw-r--r-- | tcg/tcg-op-ldst.c | 103 |
1 files changed, 22 insertions, 81 deletions
diff --git a/tcg/tcg-op-ldst.c b/tcg/tcg-op-ldst.c
index 77271e0193..73838e2701 100644
--- a/tcg/tcg-op-ldst.c
+++ b/tcg/tcg-op-ldst.c
@@ -91,25 +91,10 @@ static MemOp tcg_canonicalize_memop(MemOp op, bool is64, bool st)
 static void gen_ldst(TCGOpcode opc, TCGType type, TCGTemp *vl, TCGTemp *vh,
                      TCGTemp *addr, MemOpIdx oi)
 {
-    if (TCG_TARGET_REG_BITS == 64 || tcg_ctx->addr_type == TCG_TYPE_I32) {
-        if (vh) {
-            tcg_gen_op4(opc, type, temp_arg(vl), temp_arg(vh),
-                        temp_arg(addr), oi);
-        } else {
-            tcg_gen_op3(opc, type, temp_arg(vl), temp_arg(addr), oi);
-        }
+    if (vh) {
+        tcg_gen_op4(opc, type, temp_arg(vl), temp_arg(vh), temp_arg(addr), oi);
     } else {
-        /* See TCGV_LOW/HIGH. */
-        TCGTemp *al = addr + HOST_BIG_ENDIAN;
-        TCGTemp *ah = addr + !HOST_BIG_ENDIAN;
-
-        if (vh) {
-            tcg_gen_op5(opc, type, temp_arg(vl), temp_arg(vh),
-                        temp_arg(al), temp_arg(ah), oi);
-        } else {
-            tcg_gen_op4(opc, type, temp_arg(vl),
-                        temp_arg(al), temp_arg(ah), oi);
-        }
+        tcg_gen_op3(opc, type, temp_arg(vl), temp_arg(addr), oi);
     }
 }
 
@@ -232,7 +217,6 @@ static void tcg_gen_qemu_ld_i32_int(TCGv_i32 val, TCGTemp *addr,
     MemOp orig_memop;
     MemOpIdx orig_oi, oi;
     TCGv_i64 copy_addr;
-    TCGOpcode opc;
 
     tcg_gen_req_mo(TCG_MO_LD_LD | TCG_MO_ST_LD);
     orig_memop = memop = tcg_canonicalize_memop(memop, 0, 0);
@@ -248,12 +232,8 @@ static void tcg_gen_qemu_ld_i32_int(TCGv_i32 val, TCGTemp *addr,
     }
 
     copy_addr = plugin_maybe_preserve_addr(addr);
-    if (tcg_ctx->addr_type == TCG_TYPE_I32) {
-        opc = INDEX_op_qemu_ld_a32_i32;
-    } else {
-        opc = INDEX_op_qemu_ld_a64_i32;
-    }
-    gen_ldst(opc, TCG_TYPE_I32, tcgv_i32_temp(val), NULL, addr, oi);
+    gen_ldst(INDEX_op_qemu_ld_i32, TCG_TYPE_I32,
+             tcgv_i32_temp(val), NULL, addr, oi);
 
     plugin_gen_mem_callbacks_i32(val, copy_addr, addr, orig_oi,
                                  QEMU_PLUGIN_MEM_R);
@@ -310,17 +290,9 @@ static void tcg_gen_qemu_st_i32_int(TCGv_i32 val, TCGTemp *addr,
     }
 
     if (TCG_TARGET_HAS_qemu_st8_i32 && (memop & MO_SIZE) == MO_8) {
-        if (tcg_ctx->addr_type == TCG_TYPE_I32) {
-            opc = INDEX_op_qemu_st8_a32_i32;
-        } else {
-            opc = INDEX_op_qemu_st8_a64_i32;
-        }
+        opc = INDEX_op_qemu_st8_i32;
     } else {
-        if (tcg_ctx->addr_type == TCG_TYPE_I32) {
-            opc = INDEX_op_qemu_st_a32_i32;
-        } else {
-            opc = INDEX_op_qemu_st_a64_i32;
-        }
+        opc = INDEX_op_qemu_st_i32;
     }
     gen_ldst(opc, TCG_TYPE_I32, tcgv_i32_temp(val), NULL, addr, oi);
     plugin_gen_mem_callbacks_i32(val, NULL, addr, orig_oi, QEMU_PLUGIN_MEM_W);
@@ -344,7 +316,6 @@ static void tcg_gen_qemu_ld_i64_int(TCGv_i64 val, TCGTemp *addr,
     MemOp orig_memop;
     MemOpIdx orig_oi, oi;
     TCGv_i64 copy_addr;
-    TCGOpcode opc;
 
     if (TCG_TARGET_REG_BITS == 32 && (memop & MO_SIZE) < MO_64) {
         tcg_gen_qemu_ld_i32_int(TCGV_LOW(val), addr, idx, memop);
@@ -370,12 +341,7 @@ static void tcg_gen_qemu_ld_i64_int(TCGv_i64 val, TCGTemp *addr,
     }
 
     copy_addr = plugin_maybe_preserve_addr(addr);
-    if (tcg_ctx->addr_type == TCG_TYPE_I32) {
-        opc = INDEX_op_qemu_ld_a32_i64;
-    } else {
-        opc = INDEX_op_qemu_ld_a64_i64;
-    }
-    gen_ldst_i64(opc, val, addr, oi);
+    gen_ldst_i64(INDEX_op_qemu_ld_i64, val, addr, oi);
 
     plugin_gen_mem_callbacks_i64(val, copy_addr, addr, orig_oi,
                                  QEMU_PLUGIN_MEM_R);
@@ -412,7 +378,6 @@ static void tcg_gen_qemu_st_i64_int(TCGv_i64 val, TCGTemp *addr,
 {
     TCGv_i64 swap = NULL;
     MemOpIdx orig_oi, oi;
-    TCGOpcode opc;
 
     if (TCG_TARGET_REG_BITS == 32 && (memop & MO_SIZE) < MO_64) {
         tcg_gen_qemu_st_i32_int(TCGV_LOW(val), addr, idx, memop);
@@ -443,12 +408,7 @@ static void tcg_gen_qemu_st_i64_int(TCGv_i64 val, TCGTemp *addr,
         oi = make_memop_idx(memop, idx);
     }
 
-    if (tcg_ctx->addr_type == TCG_TYPE_I32) {
-        opc = INDEX_op_qemu_st_a32_i64;
-    } else {
-        opc = INDEX_op_qemu_st_a64_i64;
-    }
-    gen_ldst_i64(opc, val, addr, oi);
+    gen_ldst_i64(INDEX_op_qemu_st_i64, val, addr, oi);
     plugin_gen_mem_callbacks_i64(val, NULL, addr, orig_oi, QEMU_PLUGIN_MEM_W);
 
     if (swap) {
@@ -560,7 +520,6 @@ static void tcg_gen_qemu_ld_i128_int(TCGv_i128 val, TCGTemp *addr,
 {
     MemOpIdx orig_oi;
     TCGv_i64 ext_addr = NULL;
-    TCGOpcode opc;
 
     check_max_alignment(memop_alignment_bits(memop));
     tcg_gen_req_mo(TCG_MO_LD_LD | TCG_MO_ST_LD);
@@ -588,12 +547,7 @@ static void tcg_gen_qemu_ld_i128_int(TCGv_i128 val, TCGTemp *addr,
             hi = TCGV128_HIGH(val);
         }
 
-        if (tcg_ctx->addr_type == TCG_TYPE_I32) {
-            opc = INDEX_op_qemu_ld_a32_i128;
-        } else {
-            opc = INDEX_op_qemu_ld_a64_i128;
-        }
-        gen_ldst(opc, TCG_TYPE_I128, tcgv_i64_temp(lo),
+        gen_ldst(INDEX_op_qemu_ld_i128, TCG_TYPE_I128, tcgv_i64_temp(lo),
                  tcgv_i64_temp(hi), addr, oi);
 
         if (need_bswap) {
@@ -609,12 +563,6 @@ static void tcg_gen_qemu_ld_i128_int(TCGv_i128 val, TCGTemp *addr,
         canonicalize_memop_i128_as_i64(mop, memop);
         need_bswap = (mop[0] ^ memop) & MO_BSWAP;
 
-        if (tcg_ctx->addr_type == TCG_TYPE_I32) {
-            opc = INDEX_op_qemu_ld_a32_i64;
-        } else {
-            opc = INDEX_op_qemu_ld_a64_i64;
-        }
-
         /*
          * Since there are no global TCGv_i128, there is no visible state
         * changed if the second load faults. Load directly into the two
@@ -628,7 +576,8 @@ static void tcg_gen_qemu_ld_i128_int(TCGv_i128 val, TCGTemp *addr,
             y = TCGV128_LOW(val);
         }
 
-        gen_ldst_i64(opc, x, addr, make_memop_idx(mop[0], idx));
+        gen_ldst_i64(INDEX_op_qemu_ld_i64, x, addr,
+                     make_memop_idx(mop[0], idx));
 
         if (need_bswap) {
             tcg_gen_bswap64_i64(x, x);
@@ -644,7 +593,8 @@ static void tcg_gen_qemu_ld_i128_int(TCGv_i128 val, TCGTemp *addr,
             addr_p8 = tcgv_i64_temp(t);
         }
 
-        gen_ldst_i64(opc, y, addr_p8, make_memop_idx(mop[1], idx));
+        gen_ldst_i64(INDEX_op_qemu_ld_i64, y, addr_p8,
+                     make_memop_idx(mop[1], idx));
         tcg_temp_free_internal(addr_p8);
 
         if (need_bswap) {
@@ -678,7 +628,6 @@ static void tcg_gen_qemu_st_i128_int(TCGv_i128 val, TCGTemp *addr,
 {
     MemOpIdx orig_oi;
     TCGv_i64 ext_addr = NULL;
-    TCGOpcode opc;
 
     check_max_alignment(memop_alignment_bits(memop));
     tcg_gen_req_mo(TCG_MO_ST_LD | TCG_MO_ST_ST);
@@ -709,13 +658,8 @@ static void tcg_gen_qemu_st_i128_int(TCGv_i128 val, TCGTemp *addr,
             hi = TCGV128_HIGH(val);
         }
 
-        if (tcg_ctx->addr_type == TCG_TYPE_I32) {
-            opc = INDEX_op_qemu_st_a32_i128;
-        } else {
-            opc = INDEX_op_qemu_st_a64_i128;
-        }
-        gen_ldst(opc, TCG_TYPE_I128, tcgv_i64_temp(lo),
-                 tcgv_i64_temp(hi), addr, oi);
+        gen_ldst(INDEX_op_qemu_st_i128, TCG_TYPE_I128,
+                 tcgv_i64_temp(lo), tcgv_i64_temp(hi), addr, oi);
 
         if (need_bswap) {
             tcg_temp_free_i64(lo);
@@ -728,12 +672,6 @@ static void tcg_gen_qemu_st_i128_int(TCGv_i128 val, TCGTemp *addr,
 
         canonicalize_memop_i128_as_i64(mop, memop);
 
-        if (tcg_ctx->addr_type == TCG_TYPE_I32) {
-            opc = INDEX_op_qemu_st_a32_i64;
-        } else {
-            opc = INDEX_op_qemu_st_a64_i64;
-        }
-
         if ((memop & MO_BSWAP) == MO_LE) {
             x = TCGV128_LOW(val);
             y = TCGV128_HIGH(val);
@@ -748,7 +686,8 @@ static void tcg_gen_qemu_st_i128_int(TCGv_i128 val, TCGTemp *addr,
             x = b;
         }
 
-        gen_ldst_i64(opc, x, addr, make_memop_idx(mop[0], idx));
+        gen_ldst_i64(INDEX_op_qemu_st_i64, x, addr,
+                     make_memop_idx(mop[0], idx));
 
         if (tcg_ctx->addr_type == TCG_TYPE_I32) {
             TCGv_i32 t = tcg_temp_ebb_new_i32();
@@ -762,10 +701,12 @@ static void tcg_gen_qemu_st_i128_int(TCGv_i128 val, TCGTemp *addr,
 
         if (b) {
             tcg_gen_bswap64_i64(b, y);
-            gen_ldst_i64(opc, b, addr_p8, make_memop_idx(mop[1], idx));
+            gen_ldst_i64(INDEX_op_qemu_st_i64, b, addr_p8,
+                         make_memop_idx(mop[1], idx));
             tcg_temp_free_i64(b);
         } else {
-            gen_ldst_i64(opc, y, addr_p8, make_memop_idx(mop[1], idx));
+            gen_ldst_i64(INDEX_op_qemu_st_i64, y, addr_p8,
+                         make_memop_idx(mop[1], idx));
         }
         tcg_temp_free_internal(addr_p8);
     } else {
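
For orientation, every hunk above repeats the same transformation: a per-address-width opcode selection is collapsed into a single width-independent opcode. A minimal before/after sketch of one load emitter, assembled only from names visible in the hunks (an illustrative fragment, not compilable on its own, since the surrounding QEMU declarations are elided):

/* Before: the emitter chose an opcode variant from the guest address width. */
if (tcg_ctx->addr_type == TCG_TYPE_I32) {
    opc = INDEX_op_qemu_ld_a32_i32;
} else {
    opc = INDEX_op_qemu_ld_a64_i32;
}
gen_ldst(opc, TCG_TYPE_I32, tcgv_i32_temp(val), NULL, addr, oi);

/* After: one opcode covers both 32-bit and 64-bit guest addresses. */
gen_ldst(INDEX_op_qemu_ld_i32, TCG_TYPE_I32,
         tcgv_i32_temp(val), NULL, addr, oi);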