qemu with hax to log dma reads & writes jcs.org/2018/11/12/vfio

target/i386: remove gen_io_end

Force the end of a translation block after an I/O instruction in
icount mode. For consistency, all CF_USE_ICOUNT code is kept in
disas_insn instead of having it in gen_ins and gen_outs.

Reviewed-by: Richard Henderson <richard.henderson@linaro.org>
Signed-off-by: Paolo Bonzini <pbonzini@redhat.com>

+9 -13
target/i386/translate.c
@@ -1128,9 +1128,6 @@
 
 static inline void gen_ins(DisasContext *s, MemOp ot)
 {
-    if (tb_cflags(s->base.tb) & CF_USE_ICOUNT) {
-        gen_io_start();
-    }
     gen_string_movl_A0_EDI(s);
     /* Note: we must do this dummy write first to be restartable in
        case of page fault. */
@@ -1143,17 +1140,11 @@
     gen_op_movl_T0_Dshift(s, ot);
     gen_op_add_reg_T0(s, s->aflag, R_EDI);
     gen_bpt_io(s, s->tmp2_i32, ot);
-    if (tb_cflags(s->base.tb) & CF_USE_ICOUNT) {
-        gen_io_end();
-    }
 }
 
 static inline void gen_outs(DisasContext *s, MemOp ot)
 {
-    if (tb_cflags(s->base.tb) & CF_USE_ICOUNT) {
-        gen_io_start();
-    }
     gen_string_movl_A0_ESI(s);
     gen_op_ld_v(s, ot, s->T0, s->A0);
 
@@ -1163,9 +1154,6 @@
     gen_op_movl_T0_Dshift(s, ot);
     gen_op_add_reg_T0(s, s->aflag, R_ESI);
     gen_bpt_io(s, s->tmp2_i32, ot);
-    if (tb_cflags(s->base.tb) & CF_USE_ICOUNT) {
-        gen_io_end();
-    }
 }
 
 /* same method as Valgrind : we generate jumps to current or next
@@ -6400,8 +6388,12 @@
         tcg_gen_ext16u_tl(s->T0, cpu_regs[R_EDX]);
         gen_check_io(s, ot, pc_start - s->cs_base,
                      SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes) | 4);
+        if (tb_cflags(s->base.tb) & CF_USE_ICOUNT) {
+            gen_io_start();
+        }
         if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
             gen_repz_ins(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
+            /* jump generated by gen_repz_ins */
         } else {
             gen_ins(s, ot);
             if (tb_cflags(s->base.tb) & CF_USE_ICOUNT) {
@@ -6415,8 +6407,12 @@
         tcg_gen_ext16u_tl(s->T0, cpu_regs[R_EDX]);
         gen_check_io(s, ot, pc_start - s->cs_base,
                      svm_is_rep(prefixes) | 4);
+        if (tb_cflags(s->base.tb) & CF_USE_ICOUNT) {
+            gen_io_start();
+        }
         if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
             gen_repz_outs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
+            /* jump generated by gen_repz_outs */
         } else {
             gen_outs(s, ot);
             if (tb_cflags(s->base.tb) & CF_USE_ICOUNT) {
@@ -8039,7 +8035,7 @@
             gen_helper_read_crN(s->T0, cpu_env, tcg_const_i32(reg));
             gen_op_mov_reg_v(s, ot, rm, s->T0);
             if (tb_cflags(s->base.tb) & CF_USE_ICOUNT) {
-                gen_io_end();
+                gen_jmp(s, s->pc - s->cs_base);
             }
         }
         break;