/******************************************************************************
 * x86_emulate.c
 *
 * Generic x86 (32-bit and 64-bit) instruction decoder and emulator.
 *
 * Copyright (c) 2005 Keir Fraser
 *
 * Linux coding style, mod r/m decoder, segment base fixes, real-mode
 * privileged instructions:
 *
 * Copyright (C) 2006 Qumranet
 *
 *   Avi Kivity <avi@qumranet.com>
 *   Yaniv Kamay <yaniv@qumranet.com>
 *
 * This work is licensed under the terms of the GNU GPL, version 2.  See
 * the COPYING file in the top-level directory.
 *
 * From: xen-unstable 10676:af9809f51f81a3c43f276f00c81a52ef558afda4
 */

#ifndef __KERNEL__
#include <stdio.h>
#include <stdint.h>
#include <public/xen.h>
#define DPRINTF(_f, _a ...) printf(_f , ## _a)
#else
#include <linux/kvm_host.h>
#include "kvm_cache_regs.h"
#define DPRINTF(x...) do {} while (0)
#endif
#include <linux/module.h>
#include <asm/kvm_x86_emulate.h>

/*
 * Opcode effective-address decode tables.
 * Note that we only emulate instructions that have at least one memory
 * operand (excluding implicit stack references). We assume that stack
 * references and instruction fetches will never occur in special memory
 * areas that require emulation. So, for example, 'mov <imm>,<reg>' need
 * not be handled.
 */

/* Operand sizes: 8-bit operands or specified/overridden size. */
#define ByteOp      (1<<0)	/* 8-bit operands. */
/* Destination operand type. */
#define ImplicitOps (1<<1)	/* Implicit in opcode. No generic decode. */
#define DstReg      (2<<1)	/* Register operand. */
#define DstMem      (3<<1)	/* Memory operand. */
#define DstMask     (3<<1)
/* Source operand type. */
#define SrcNone     (0<<3)	/* No source operand. */
#define SrcImplicit (0<<3)	/* Source operand is implicit in the opcode. */
#define SrcReg      (1<<3)	/* Register operand. */
#define SrcMem      (2<<3)	/* Memory operand. */
#define SrcMem16    (3<<3)	/* Memory operand (16-bit). */
#define SrcMem32    (4<<3)	/* Memory operand (32-bit). */
#define SrcImm      (5<<3)	/* Immediate operand. */
#define SrcImmByte  (6<<3)	/* 8-bit sign-extended immediate operand. */
#define SrcMask     (7<<3)
/* Generic ModRM decode. */
#define ModRM       (1<<6)
/* Destination is only written; never read. */
#define Mov         (1<<7)
#define BitOp       (1<<8)
#define MemAbs      (1<<9)      /* Memory operand is absolute displacement */
#define String      (1<<10)     /* String instruction (rep capable) */
#define Stack       (1<<11)     /* Stack instruction (push/pop) */
#define Group       (1<<14)     /* Bits 3:5 of modrm byte extend opcode */
#define GroupDual   (1<<15)     /* Alternate decoding of mod == 3 */
#define GroupMask   0xff        /* Group number stored in bits 0:7 */

enum {
	Group1_80, Group1_81, Group1_82, Group1_83,
	Group1A, Group3_Byte, Group3, Group4, Group5, Group7,
};
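
/*
 * Illustrative note (added commentary, not part of the original source):
 * each opcode_table[] entry below packs the decode hints above into a u16.
 * Opcode 0x00 (ADD r/m8, r8), for example, is encoded as
 * ByteOp | DstMem | SrcReg | ModRM: byte-sized operands, destination taken
 * from the ModRM r/m field, source from the ModRM reg field, and a ModRM
 * byte following the opcode.  The decoder recovers the fields with
 * (c->d & DstMask) and (c->d & SrcMask); an entry of 0 means the opcode is
 * not emulated here.
 */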

static u16 opcode_table[256] = {
	/* 0x00 - 0x07 */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	0, 0, 0, 0,
	/* 0x08 - 0x0F */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	0, 0, 0, 0,
	/* 0x10 - 0x17 */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	0, 0, 0, 0,
	/* 0x18 - 0x1F */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	0, 0, 0, 0,
	/* 0x20 - 0x27 */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	SrcImmByte, SrcImm, 0, 0,
	/* 0x28 - 0x2F */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	0, 0, 0, 0,
	/* 0x30 - 0x37 */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	0, 0, 0, 0,
	/* 0x38 - 0x3F */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	0, 0, 0, 0,
	/* 0x40 - 0x47 */
	DstReg, DstReg, DstReg, DstReg, DstReg, DstReg, DstReg, DstReg,
	/* 0x48 - 0x4F */
	DstReg, DstReg, DstReg, DstReg,	DstReg, DstReg, DstReg, DstReg,
	/* 0x50 - 0x57 */
	SrcReg | Stack, SrcReg | Stack, SrcReg | Stack, SrcReg | Stack,
	SrcReg | Stack, SrcReg | Stack, SrcReg | Stack, SrcReg | Stack,
	/* 0x58 - 0x5F */
	DstReg | Stack, DstReg | Stack, DstReg | Stack, DstReg | Stack,
	DstReg | Stack, DstReg | Stack, DstReg | Stack, DstReg | Stack,
	/* 0x60 - 0x67 */
	0, 0, 0, DstReg | SrcMem32 | ModRM | Mov /* movsxd (x86/64) */ ,
	0, 0, 0, 0,
	/* 0x68 - 0x6F */
	SrcImm | Mov | Stack, 0, SrcImmByte | Mov | Stack, 0,
	SrcNone  | ByteOp  | ImplicitOps, SrcNone  | ImplicitOps, /* insb, insw/insd */
	SrcNone  | ByteOp  | ImplicitOps, SrcNone  | ImplicitOps, /* outsb, outsw/outsd */
	/* 0x70 - 0x77 */
	ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
	ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
	/* 0x78 - 0x7F */
	ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
	ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
	/* 0x80 - 0x87 */
	Group | Group1_80, Group | Group1_81,
	Group | Group1_82, Group | Group1_83,
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	/* 0x88 - 0x8F */
	ByteOp | DstMem | SrcReg | ModRM | Mov, DstMem | SrcReg | ModRM | Mov,
	ByteOp | DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstMem | SrcReg | ModRM | Mov, ModRM | DstReg,
	DstReg | SrcMem | ModRM | Mov, Group | Group1A,
	/* 0x90 - 0x97 */
	DstReg, DstReg, DstReg, DstReg,	DstReg, DstReg, DstReg, DstReg,
	/* 0x98 - 0x9F */
	0, 0, 0, 0, ImplicitOps | Stack, ImplicitOps | Stack, 0, 0,
	/* 0xA0 - 0xA7 */
	ByteOp | DstReg | SrcMem | Mov | MemAbs, DstReg | SrcMem | Mov | MemAbs,
	ByteOp | DstMem | SrcReg | Mov | MemAbs, DstMem | SrcReg | Mov | MemAbs,
	ByteOp | ImplicitOps | Mov | String, ImplicitOps | Mov | String,
	ByteOp | ImplicitOps | String, ImplicitOps | String,
	/* 0xA8 - 0xAF */
	0, 0, ByteOp | ImplicitOps | Mov | String, ImplicitOps | Mov | String,
	ByteOp | ImplicitOps | Mov | String, ImplicitOps | Mov | String,
	ByteOp | ImplicitOps | String, ImplicitOps | String,
	/* 0xB0 - 0xB7 */
	ByteOp | DstReg | SrcImm | Mov, ByteOp | DstReg | SrcImm | Mov,
	ByteOp | DstReg | SrcImm | Mov, ByteOp | DstReg | SrcImm | Mov,
	ByteOp | DstReg | SrcImm | Mov, ByteOp | DstReg | SrcImm | Mov,
	ByteOp | DstReg | SrcImm | Mov, ByteOp | DstReg | SrcImm | Mov,
	/* 0xB8 - 0xBF */
	DstReg | SrcImm | Mov, DstReg | SrcImm | Mov,
	DstReg | SrcImm | Mov, DstReg | SrcImm | Mov,
	DstReg | SrcImm | Mov, DstReg | SrcImm | Mov,
	DstReg | SrcImm | Mov, DstReg | SrcImm | Mov,
	/* 0xC0 - 0xC7 */
	ByteOp | DstMem | SrcImm | ModRM, DstMem | SrcImmByte | ModRM,
	0, ImplicitOps | Stack, 0, 0,
	ByteOp | DstMem | SrcImm | ModRM | Mov, DstMem | SrcImm | ModRM | Mov,
	/* 0xC8 - 0xCF */
	0, 0, 0, 0, 0, 0, 0, 0,
	/* 0xD0 - 0xD7 */
	ByteOp | DstMem | SrcImplicit | ModRM, DstMem | SrcImplicit | ModRM,
	ByteOp | DstMem | SrcImplicit | ModRM, DstMem | SrcImplicit | ModRM,
	0, 0, 0, 0,
	/* 0xD8 - 0xDF */
	0, 0, 0, 0, 0, 0, 0, 0,
	/* 0xE0 - 0xE7 */
	0, 0, 0, 0, 0, 0, 0, 0,
	/* 0xE8 - 0xEF */
	ImplicitOps | Stack, SrcImm | ImplicitOps,
	ImplicitOps, SrcImmByte | ImplicitOps,
	0, 0, 0, 0,
	/* 0xF0 - 0xF7 */
	0, 0, 0, 0,
	ImplicitOps, ImplicitOps, Group | Group3_Byte, Group | Group3,
	/* 0xF8 - 0xFF */
	ImplicitOps, 0, ImplicitOps, ImplicitOps,
	0, 0, Group | Group4, Group | Group5,
};

static u16 twobyte_table[256] = {
	/* 0x00 - 0x0F */
	0, Group | GroupDual | Group7, 0, 0, 0, 0, ImplicitOps, 0,
	ImplicitOps, ImplicitOps, 0, 0, 0, ImplicitOps | ModRM, 0, 0,
	/* 0x10 - 0x1F */
	0, 0, 0, 0, 0, 0, 0, 0, ImplicitOps | ModRM, 0, 0, 0, 0, 0, 0, 0,
	/* 0x20 - 0x2F */
	ModRM | ImplicitOps, ModRM, ModRM | ImplicitOps, ModRM, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	/* 0x30 - 0x3F */
	ImplicitOps, 0, ImplicitOps, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0x40 - 0x47 */
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	/* 0x48 - 0x4F */
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	/* 0x50 - 0x5F */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0x60 - 0x6F */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0x70 - 0x7F */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0x80 - 0x8F */
	ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
	ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
	ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
	ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
	/* 0x90 - 0x9F */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0xA0 - 0xA7 */
	0, 0, 0, DstMem | SrcReg | ModRM | BitOp, 0, 0, 0, 0,
	/* 0xA8 - 0xAF */
	0, 0, 0, DstMem | SrcReg | ModRM | BitOp, 0, 0, ModRM, 0,
	/* 0xB0 - 0xB7 */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM, 0,
	    DstMem | SrcReg | ModRM | BitOp,
	0, 0, ByteOp | DstReg | SrcMem | ModRM | Mov,
	    DstReg | SrcMem16 | ModRM | Mov,
	/* 0xB8 - 0xBF */
	0, 0, DstMem | SrcImmByte | ModRM, DstMem | SrcReg | ModRM | BitOp,
	0, 0, ByteOp | DstReg | SrcMem | ModRM | Mov,
	    DstReg | SrcMem16 | ModRM | Mov,
	/* 0xC0 - 0xCF */
	0, 0, 0, DstMem | SrcReg | ModRM | Mov, 0, 0, 0, ImplicitOps | ModRM,
	0, 0, 0, 0, 0, 0, 0, 0,
	/* 0xD0 - 0xDF */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0xE0 - 0xEF */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0xF0 - 0xFF */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
};

static u16 group_table[] = {
	[Group1_80*8] =
	ByteOp | DstMem | SrcImm | ModRM, ByteOp | DstMem | SrcImm | ModRM,
	ByteOp | DstMem | SrcImm | ModRM, ByteOp | DstMem | SrcImm | ModRM,
	ByteOp | DstMem | SrcImm | ModRM, ByteOp | DstMem | SrcImm | ModRM,
	ByteOp | DstMem | SrcImm | ModRM, ByteOp | DstMem | SrcImm | ModRM,
	[Group1_81*8] =
	DstMem | SrcImm | ModRM, DstMem | SrcImm | ModRM,
	DstMem | SrcImm | ModRM, DstMem | SrcImm | ModRM,
	DstMem | SrcImm | ModRM, DstMem | SrcImm | ModRM,
	DstMem | SrcImm | ModRM, DstMem | SrcImm | ModRM,
	[Group1_82*8] =
	ByteOp | DstMem | SrcImm | ModRM, ByteOp | DstMem | SrcImm | ModRM,
	ByteOp | DstMem | SrcImm | ModRM, ByteOp | DstMem | SrcImm | ModRM,
	ByteOp | DstMem | SrcImm | ModRM, ByteOp | DstMem | SrcImm | ModRM,
	ByteOp | DstMem | SrcImm | ModRM, ByteOp | DstMem | SrcImm | ModRM,
	[Group1_83*8] =
	DstMem | SrcImmByte | ModRM, DstMem | SrcImmByte | ModRM,
	DstMem | SrcImmByte | ModRM, DstMem | SrcImmByte | ModRM,
	DstMem | SrcImmByte | ModRM, DstMem | SrcImmByte | ModRM,
	DstMem | SrcImmByte | ModRM, DstMem | SrcImmByte | ModRM,
	[Group1A*8] =
	DstMem | SrcNone | ModRM | Mov | Stack, 0, 0, 0, 0, 0, 0, 0,
	[Group3_Byte*8] =
	ByteOp | SrcImm | DstMem | ModRM, 0,
	ByteOp | DstMem | SrcNone | ModRM, ByteOp | DstMem | SrcNone | ModRM,
	0, 0, 0, 0,
	[Group3*8] =
	DstMem | SrcImm | ModRM, 0,
	DstMem | SrcNone | ModRM, DstMem | SrcNone | ModRM,
	0, 0, 0, 0,
	[Group4*8] =
	ByteOp | DstMem | SrcNone | ModRM, ByteOp | DstMem | SrcNone | ModRM,
	0, 0, 0, 0, 0, 0,
	[Group5*8] =
	DstMem | SrcNone | ModRM, DstMem | SrcNone | ModRM, 0, 0,
	SrcMem | ModRM, 0, SrcMem | ModRM | Stack, 0,
	[Group7*8] =
	0, 0, ModRM | SrcMem, ModRM | SrcMem,
	SrcNone | ModRM | DstMem | Mov, 0,
	SrcMem16 | ModRM | Mov, SrcMem | ModRM | ByteOp,
};

static u16 group2_table[] = {
	[Group7*8] =
	SrcNone | ModRM, 0, 0, 0,
	SrcNone | ModRM | DstMem | Mov, 0,
	SrcMem16 | ModRM | Mov, 0,
};
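
/*
 * Illustrative note (added commentary): an opcode_table[] entry with the
 * Group bit set names one of the groups above in its low bits, and the
 * final decode flags are taken from group_table[] (or group2_table[] when
 * GroupDual is set and ModRM.mod == 3).  x86_decode_insn() computes the
 * index as (group << 3) + ModRM.reg, so opcode 0xf7 with ModRM.reg == 3
 * ("neg") selects the fourth entry of the Group3 row.
 */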

/* EFLAGS bit definitions. */
#define EFLG_OF (1<<11)
#define EFLG_DF (1<<10)
#define EFLG_SF (1<<7)
#define EFLG_ZF (1<<6)
#define EFLG_AF (1<<4)
#define EFLG_PF (1<<2)
#define EFLG_CF (1<<0)

/*
 * Instruction emulation:
 * Most instructions are emulated directly via a fragment of inline assembly
 * code. This allows us to save/restore EFLAGS and thus very easily pick up
 * any modified flags.
 */

#if defined(CONFIG_X86_64)
#define _LO32 "k"		/* force 32-bit operand */
#define _STK  "%%rsp"		/* stack pointer */
#elif defined(__i386__)
#define _LO32 ""		/* force 32-bit operand */
#define _STK  "%%esp"		/* stack pointer */
#endif

/*
 * These EFLAGS bits are restored from saved value during emulation, and
 * any changes are written back to the saved value after emulation.
 */
#define EFLAGS_MASK (EFLG_OF|EFLG_SF|EFLG_ZF|EFLG_AF|EFLG_PF|EFLG_CF)

/* Before executing instruction: restore necessary bits in EFLAGS. */
#define _PRE_EFLAGS(_sav, _msk, _tmp)					\
	/* EFLAGS = (_sav & _msk) | (EFLAGS & ~_msk); _sav &= ~_msk; */ \
	"movl %"_sav",%"_LO32 _tmp"; "                                  \
	"push %"_tmp"; "                                                \
	"push %"_tmp"; "                                                \
	"movl %"_msk",%"_LO32 _tmp"; "                                  \
	"andl %"_LO32 _tmp",("_STK"); "                                 \
	"pushf; "                                                       \
	"notl %"_LO32 _tmp"; "                                          \
	"andl %"_LO32 _tmp",("_STK"); "                                 \
	"andl %"_LO32 _tmp","__stringify(BITS_PER_LONG/4)"("_STK"); "	\
	"pop  %"_tmp"; "                                                \
	"orl  %"_LO32 _tmp",("_STK"); "                                 \
	"popf; "                                                        \
	"pop  %"_sav"; "

/* After executing instruction: write-back necessary bits in EFLAGS. */
#define _POST_EFLAGS(_sav, _msk, _tmp) \
	/* _sav |= EFLAGS & _msk; */		\
	"pushf; "				\
	"pop  %"_tmp"; "			\
	"andl %"_msk",%"_LO32 _tmp"; "		\
	"orl  %"_LO32 _tmp",%"_sav"; "

/* Raw emulation: instruction has two explicit operands. */
#define __emulate_2op_nobyte(_op,_src,_dst,_eflags,_wx,_wy,_lx,_ly,_qx,_qy) \
	do { 								    \
		unsigned long _tmp;					    \
									    \
		switch ((_dst).bytes) {					    \
		case 2:							    \
			__asm__ __volatile__ (				    \
				_PRE_EFLAGS("0", "4", "2")		    \
				_op"w %"_wx"3,%1; "			    \
				_POST_EFLAGS("0", "4", "2")		    \
				: "=m" (_eflags), "=m" ((_dst).val),        \
				  "=&r" (_tmp)				    \
				: _wy ((_src).val), "i" (EFLAGS_MASK));     \
			break;						    \
		case 4:							    \
			__asm__ __volatile__ (				    \
				_PRE_EFLAGS("0", "4", "2")		    \
				_op"l %"_lx"3,%1; "			    \
				_POST_EFLAGS("0", "4", "2")		    \
				: "=m" (_eflags), "=m" ((_dst).val),	    \
				  "=&r" (_tmp)				    \
				: _ly ((_src).val), "i" (EFLAGS_MASK));     \
			break;						    \
		case 8:							    \
			__emulate_2op_8byte(_op, _src, _dst,		    \
					    _eflags, _qx, _qy);		    \
			break;						    \
		}							    \
	} while (0)

#define __emulate_2op(_op,_src,_dst,_eflags,_bx,_by,_wx,_wy,_lx,_ly,_qx,_qy) \
	do {								     \
		unsigned long __tmp;					     \
		switch ((_dst).bytes) {				             \
		case 1:							     \
			__asm__ __volatile__ (				     \
				_PRE_EFLAGS("0", "4", "2")		     \
				_op"b %"_bx"3,%1; "			     \
				_POST_EFLAGS("0", "4", "2")		     \
				: "=m" (_eflags), "=m" ((_dst).val),	     \
				  "=&r" (__tmp)				     \
				: _by ((_src).val), "i" (EFLAGS_MASK));      \
			break;						     \
		default:						     \
			__emulate_2op_nobyte(_op, _src, _dst, _eflags,	     \
					     _wx, _wy, _lx, _ly, _qx, _qy);  \
			break;						     \
		}							     \
	} while (0)
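
/*
 * Illustrative note (added commentary): an opcode handler further down
 * invokes these helpers through the wrappers defined just below, e.g.
 *
 *	emulate_2op_SrcV("add", c->src, c->dst, ctxt->eflags);
 *
 * which, for a 4-byte destination, expands to an "addl" on c->dst.val
 * bracketed by _PRE_EFLAGS/_POST_EFLAGS, so the guest's saved CF/ZF/SF/OF
 * are loaded before the instruction and the results copied back afterwards.
 */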

/* Source operand is byte-sized and may be restricted to just %cl. */
#define emulate_2op_SrcB(_op, _src, _dst, _eflags)                      \
	__emulate_2op(_op, _src, _dst, _eflags,				\
		      "b", "c", "b", "c", "b", "c", "b", "c")

/* Source operand is byte, word, long or quad sized. */
#define emulate_2op_SrcV(_op, _src, _dst, _eflags)                      \
	__emulate_2op(_op, _src, _dst, _eflags,				\
		      "b", "q", "w", "r", _LO32, "r", "", "r")

/* Source operand is word, long or quad sized. */
#define emulate_2op_SrcV_nobyte(_op, _src, _dst, _eflags)               \
	__emulate_2op_nobyte(_op, _src, _dst, _eflags,			\
			     "w", "r", _LO32, "r", "", "r")

/* Instruction has only one explicit operand (no source operand). */
#define emulate_1op(_op, _dst, _eflags)                                    \
	do {								\
		unsigned long _tmp;					\
									\
		switch ((_dst).bytes) {				        \
		case 1:							\
			__asm__ __volatile__ (				\
				_PRE_EFLAGS("0", "3", "2")		\
				_op"b %1; "				\
				_POST_EFLAGS("0", "3", "2")		\
				: "=m" (_eflags), "=m" ((_dst).val),	\
				  "=&r" (_tmp)				\
				: "i" (EFLAGS_MASK));			\
			break;						\
		case 2:							\
			__asm__ __volatile__ (				\
				_PRE_EFLAGS("0", "3", "2")		\
				_op"w %1; "				\
				_POST_EFLAGS("0", "3", "2")		\
				: "=m" (_eflags), "=m" ((_dst).val),	\
				  "=&r" (_tmp)				\
				: "i" (EFLAGS_MASK));			\
			break;						\
		case 4:							\
			__asm__ __volatile__ (				\
				_PRE_EFLAGS("0", "3", "2")		\
				_op"l %1; "				\
				_POST_EFLAGS("0", "3", "2")		\
				: "=m" (_eflags), "=m" ((_dst).val),	\
				  "=&r" (_tmp)				\
				: "i" (EFLAGS_MASK));			\
			break;						\
		case 8:							\
			__emulate_1op_8byte(_op, _dst, _eflags);	\
			break;						\
		}							\
	} while (0)

/* Emulate an instruction with quadword operands (x86/64 only). */
#if defined(CONFIG_X86_64)
#define __emulate_2op_8byte(_op, _src, _dst, _eflags, _qx, _qy)           \
	do {								  \
		__asm__ __volatile__ (					  \
			_PRE_EFLAGS("0", "4", "2")			  \
			_op"q %"_qx"3,%1; "				  \
			_POST_EFLAGS("0", "4", "2")			  \
			: "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp) \
			: _qy ((_src).val), "i" (EFLAGS_MASK));		\
	} while (0)

#define __emulate_1op_8byte(_op, _dst, _eflags)                           \
	do {								  \
		__asm__ __volatile__ (					  \
			_PRE_EFLAGS("0", "3", "2")			  \
			_op"q %1; "					  \
			_POST_EFLAGS("0", "3", "2")			  \
			: "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp) \
			: "i" (EFLAGS_MASK));				  \
	} while (0)

#elif defined(__i386__)
#define __emulate_2op_8byte(_op, _src, _dst, _eflags, _qx, _qy)
#define __emulate_1op_8byte(_op, _dst, _eflags)
#endif				/* __i386__ */

/* Fetch next part of the instruction being emulated. */
#define insn_fetch(_type, _size, _eip)                                  \
({	unsigned long _x;						\
	rc = do_insn_fetch(ctxt, ops, (_eip), &_x, (_size));		\
	if (rc != 0)							\
		goto done;						\
	(_eip) += (_size);						\
	(_type)_x;							\
})
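
/*
 * Illustrative note (added commentary): insn_fetch() can only be used in
 * functions that declare "int rc" and provide a "done:" label, since it
 * bails out through them when the fetch fails.  A typical use in the
 * decoder below is:
 *
 *	c->modrm = insn_fetch(u8, 1, c->eip);
 */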

static inline unsigned long ad_mask(struct decode_cache *c)
{
	return (1UL << (c->ad_bytes << 3)) - 1;
}

/* Access/update address held in a register, based on addressing mode. */
static inline unsigned long
address_mask(struct decode_cache *c, unsigned long reg)
{
	if (c->ad_bytes == sizeof(unsigned long))
		return reg;
	else
		return reg & ad_mask(c);
}

static inline unsigned long
register_address(struct decode_cache *c, unsigned long base, unsigned long reg)
{
	return base + address_mask(c, reg);
}

static inline void
register_address_increment(struct decode_cache *c, unsigned long *reg, int inc)
{
	if (c->ad_bytes == sizeof(unsigned long))
		*reg += inc;
	else
		*reg = (*reg & ~ad_mask(c)) | ((*reg + inc) & ad_mask(c));
}
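
/*
 * Illustrative note (added commentary): ad_mask() evaluates to 0xffff for
 * c->ad_bytes == 2 and 0xffffffff for c->ad_bytes == 4, so
 * register_address_increment() wraps e.g. a 16-bit SI at 64K while leaving
 * the upper bits of the register untouched, matching the guest's
 * address-size semantics.
 */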

static inline void jmp_rel(struct decode_cache *c, int rel)
{
	register_address_increment(c, &c->eip, rel);
}

static void set_seg_override(struct decode_cache *c, int seg)
{
	c->has_seg_override = true;
	c->seg_override = seg;
}

static unsigned long seg_base(struct x86_emulate_ctxt *ctxt, int seg)
{
	if (ctxt->mode == X86EMUL_MODE_PROT64 && seg < VCPU_SREG_FS)
		return 0;

	return kvm_x86_ops->get_segment_base(ctxt->vcpu, seg);
}

static unsigned long seg_override_base(struct x86_emulate_ctxt *ctxt,
				       struct decode_cache *c)
{
	if (!c->has_seg_override)
		return 0;

	return seg_base(ctxt, c->seg_override);
}

static unsigned long es_base(struct x86_emulate_ctxt *ctxt)
{
	return seg_base(ctxt, VCPU_SREG_ES);
}

static unsigned long ss_base(struct x86_emulate_ctxt *ctxt)
{
	return seg_base(ctxt, VCPU_SREG_SS);
}

static int do_fetch_insn_byte(struct x86_emulate_ctxt *ctxt,
			      struct x86_emulate_ops *ops,
			      unsigned long linear, u8 *dest)
{
	struct fetch_cache *fc = &ctxt->decode.fetch;
	int rc;
	int size;

	if (linear < fc->start || linear >= fc->end) {
		size = min(15UL, PAGE_SIZE - offset_in_page(linear));
		rc = ops->read_std(linear, fc->data, size, ctxt->vcpu);
		if (rc)
			return rc;
		fc->start = linear;
		fc->end = linear + size;
	}
	*dest = fc->data[linear - fc->start];
	return 0;
}

static int do_insn_fetch(struct x86_emulate_ctxt *ctxt,
			 struct x86_emulate_ops *ops,
			 unsigned long eip, void *dest, unsigned size)
{
	int rc = 0;

	eip += ctxt->cs_base;
	while (size--) {
		rc = do_fetch_insn_byte(ctxt, ops, eip++, dest++);
		if (rc)
			return rc;
	}
	return 0;
}
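
/*
 * Illustrative note (added commentary): do_fetch_insn_byte() keeps a small
 * window of guest code cached in ctxt->decode.fetch.  The window is
 * refilled at most 15 bytes at a time (the maximum x86 instruction length)
 * and never across a page boundary, so each byte of an instruction
 * normally costs one cached lookup rather than a fresh read_std() call.
 */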

/*
 * Given the 'reg' portion of a ModRM byte, and a register block, return a
 * pointer into the block that addresses the relevant register.
 * @highbyte_regs specifies whether to decode AH,CH,DH,BH.
 */
static void *decode_register(u8 modrm_reg, unsigned long *regs,
			     int highbyte_regs)
{
	void *p;

	p = &regs[modrm_reg];
	if (highbyte_regs && modrm_reg >= 4 && modrm_reg < 8)
		p = (unsigned char *)&regs[modrm_reg & 3] + 1;
	return p;
}
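
/*
 * Illustrative note (added commentary): with highbyte_regs set, reg values
 * 4-7 select AH/CH/DH/BH, i.e. byte 1 of RAX/RCX/RDX/RBX, which is why the
 * pointer is rebased to &regs[modrm_reg & 3] plus one byte.
 */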

static int read_descriptor(struct x86_emulate_ctxt *ctxt,
			   struct x86_emulate_ops *ops,
			   void *ptr,
			   u16 *size, unsigned long *address, int op_bytes)
{
	int rc;

	if (op_bytes == 2)
		op_bytes = 3;
	*address = 0;
	rc = ops->read_std((unsigned long)ptr, (unsigned long *)size, 2,
			   ctxt->vcpu);
	if (rc)
		return rc;
	rc = ops->read_std((unsigned long)ptr + 2, address, op_bytes,
			   ctxt->vcpu);
	return rc;
}

static int test_cc(unsigned int condition, unsigned int flags)
{
	int rc = 0;

	switch ((condition & 15) >> 1) {
	case 0: /* o */
		rc |= (flags & EFLG_OF);
		break;
	case 1: /* b/c/nae */
		rc |= (flags & EFLG_CF);
		break;
	case 2: /* z/e */
		rc |= (flags & EFLG_ZF);
		break;
	case 3: /* be/na */
		rc |= (flags & (EFLG_CF|EFLG_ZF));
		break;
	case 4: /* s */
		rc |= (flags & EFLG_SF);
		break;
	case 5: /* p/pe */
		rc |= (flags & EFLG_PF);
		break;
	case 7: /* le/ng */
		rc |= (flags & EFLG_ZF);
		/* fall through */
	case 6: /* l/nge */
		rc |= (!(flags & EFLG_SF) != !(flags & EFLG_OF));
		break;
	}

	/* Odd condition identifiers (lsb == 1) have inverted sense. */
	return (!!rc ^ (condition & 1));
}
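
/*
 * Illustrative note (added commentary): the switch above covers the eight
 * base condition pairs; bit 0 of the condition inverts the result.  The
 * jcc handler passes the opcode byte itself, so for 0x74 (JE/JZ)
 * (0x74 & 15) >> 1 == 2 selects the ZF test and the result is not
 * inverted, while 0x75 (JNE/JNZ) inverts it.
 */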

static void decode_register_operand(struct operand *op,
				    struct decode_cache *c,
				    int inhibit_bytereg)
{
	unsigned reg = c->modrm_reg;
	int highbyte_regs = c->rex_prefix == 0;

	if (!(c->d & ModRM))
		reg = (c->b & 7) | ((c->rex_prefix & 1) << 3);
	op->type = OP_REG;
	if ((c->d & ByteOp) && !inhibit_bytereg) {
		op->ptr = decode_register(reg, c->regs, highbyte_regs);
		op->val = *(u8 *)op->ptr;
		op->bytes = 1;
	} else {
		op->ptr = decode_register(reg, c->regs, 0);
		op->bytes = c->op_bytes;
		switch (op->bytes) {
		case 2:
			op->val = *(u16 *)op->ptr;
			break;
		case 4:
			op->val = *(u32 *)op->ptr;
			break;
		case 8:
			op->val = *(u64 *) op->ptr;
			break;
		}
	}
	op->orig_val = op->val;
}

static int decode_modrm(struct x86_emulate_ctxt *ctxt,
			struct x86_emulate_ops *ops)
{
	struct decode_cache *c = &ctxt->decode;
	u8 sib;
	int index_reg = 0, base_reg = 0, scale;
	int rc = 0;

	if (c->rex_prefix) {
		c->modrm_reg = (c->rex_prefix & 4) << 1;	/* REX.R */
		index_reg = (c->rex_prefix & 2) << 2; /* REX.X */
		c->modrm_rm = base_reg = (c->rex_prefix & 1) << 3; /* REX.B */
	}

	c->modrm = insn_fetch(u8, 1, c->eip);
	c->modrm_mod |= (c->modrm & 0xc0) >> 6;
	c->modrm_reg |= (c->modrm & 0x38) >> 3;
	c->modrm_rm |= (c->modrm & 0x07);
	c->modrm_ea = 0;
	c->use_modrm_ea = 1;

	if (c->modrm_mod == 3) {
		c->modrm_ptr = decode_register(c->modrm_rm,
					       c->regs, c->d & ByteOp);
		c->modrm_val = *(unsigned long *)c->modrm_ptr;
		return rc;
	}

	if (c->ad_bytes == 2) {
		unsigned bx = c->regs[VCPU_REGS_RBX];
		unsigned bp = c->regs[VCPU_REGS_RBP];
		unsigned si = c->regs[VCPU_REGS_RSI];
		unsigned di = c->regs[VCPU_REGS_RDI];

		/* 16-bit ModR/M decode. */
		switch (c->modrm_mod) {
		case 0:
			if (c->modrm_rm == 6)
				c->modrm_ea += insn_fetch(u16, 2, c->eip);
			break;
		case 1:
			c->modrm_ea += insn_fetch(s8, 1, c->eip);
			break;
		case 2:
			c->modrm_ea += insn_fetch(u16, 2, c->eip);
			break;
		}
		switch (c->modrm_rm) {
		case 0:
			c->modrm_ea += bx + si;
			break;
		case 1:
			c->modrm_ea += bx + di;
			break;
		case 2:
			c->modrm_ea += bp + si;
			break;
		case 3:
			c->modrm_ea += bp + di;
			break;
		case 4:
			c->modrm_ea += si;
			break;
		case 5:
			c->modrm_ea += di;
			break;
		case 6:
			if (c->modrm_mod != 0)
				c->modrm_ea += bp;
			break;
		case 7:
			c->modrm_ea += bx;
			break;
		}
		if (c->modrm_rm == 2 || c->modrm_rm == 3 ||
		    (c->modrm_rm == 6 && c->modrm_mod != 0))
			if (!c->has_seg_override)
				set_seg_override(c, VCPU_SREG_SS);
		c->modrm_ea = (u16)c->modrm_ea;
	} else {
		/* 32/64-bit ModR/M decode. */
		if ((c->modrm_rm & 7) == 4) {
			sib = insn_fetch(u8, 1, c->eip);
			index_reg |= (sib >> 3) & 7;
			base_reg |= sib & 7;
			scale = sib >> 6;

			if ((base_reg & 7) == 5 && c->modrm_mod == 0)
				c->modrm_ea += insn_fetch(s32, 4, c->eip);
			else
				c->modrm_ea += c->regs[base_reg];
			if (index_reg != 4)
				c->modrm_ea += c->regs[index_reg] << scale;
		} else if ((c->modrm_rm & 7) == 5 && c->modrm_mod == 0) {
			if (ctxt->mode == X86EMUL_MODE_PROT64)
				c->rip_relative = 1;
		} else
			c->modrm_ea += c->regs[c->modrm_rm];
		switch (c->modrm_mod) {
		case 0:
			if (c->modrm_rm == 5)
				c->modrm_ea += insn_fetch(s32, 4, c->eip);
			break;
		case 1:
			c->modrm_ea += insn_fetch(s8, 1, c->eip);
			break;
		case 2:
			c->modrm_ea += insn_fetch(s32, 4, c->eip);
			break;
		}
	}
done:
	return rc;
}
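
/*
 * Illustrative note (added commentary): for a 16-bit operand such as
 * "mov 0x1234(%bx,%si)" (mod == 2, rm == 0) the code above yields
 * modrm_ea = BX + SI + disp16, truncated to 16 bits.  The 32/64-bit path
 * instead adds an optional SIB-scaled index plus a sign-extended
 * displacement, and RIP-relative forms (mod == 0, rm == 5 in long mode)
 * are only fixed up against c->eip at the end of x86_decode_insn().
 */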

static int decode_abs(struct x86_emulate_ctxt *ctxt,
		      struct x86_emulate_ops *ops)
{
	struct decode_cache *c = &ctxt->decode;
	int rc = 0;

	switch (c->ad_bytes) {
	case 2:
		c->modrm_ea = insn_fetch(u16, 2, c->eip);
		break;
	case 4:
		c->modrm_ea = insn_fetch(u32, 4, c->eip);
		break;
	case 8:
		c->modrm_ea = insn_fetch(u64, 8, c->eip);
		break;
	}
done:
	return rc;
}

int
x86_decode_insn(struct x86_emulate_ctxt *ctxt, struct x86_emulate_ops *ops)
{
	struct decode_cache *c = &ctxt->decode;
	int rc = 0;
	int mode = ctxt->mode;
	int def_op_bytes, def_ad_bytes, group;

	/* Shadow copy of register state. Committed on successful emulation. */

	memset(c, 0, sizeof(struct decode_cache));
	c->eip = kvm_rip_read(ctxt->vcpu);
	ctxt->cs_base = seg_base(ctxt, VCPU_SREG_CS);
	memcpy(c->regs, ctxt->vcpu->arch.regs, sizeof c->regs);

	switch (mode) {
	case X86EMUL_MODE_REAL:
	case X86EMUL_MODE_PROT16:
		def_op_bytes = def_ad_bytes = 2;
		break;
	case X86EMUL_MODE_PROT32:
		def_op_bytes = def_ad_bytes = 4;
		break;
#ifdef CONFIG_X86_64
	case X86EMUL_MODE_PROT64:
		def_op_bytes = 4;
		def_ad_bytes = 8;
		break;
#endif
	default:
		return -1;
	}

	c->op_bytes = def_op_bytes;
	c->ad_bytes = def_ad_bytes;

	/* Legacy prefixes. */
	for (;;) {
		switch (c->b = insn_fetch(u8, 1, c->eip)) {
		case 0x66:	/* operand-size override */
			/* switch between 2/4 bytes */
			c->op_bytes = def_op_bytes ^ 6;
			break;
		case 0x67:	/* address-size override */
			if (mode == X86EMUL_MODE_PROT64)
				/* switch between 4/8 bytes */
				c->ad_bytes = def_ad_bytes ^ 12;
			else
				/* switch between 2/4 bytes */
				c->ad_bytes = def_ad_bytes ^ 6;
			break;
		case 0x26:	/* ES override */
		case 0x2e:	/* CS override */
		case 0x36:	/* SS override */
		case 0x3e:	/* DS override */
			set_seg_override(c, (c->b >> 3) & 3);
			break;
		case 0x64:	/* FS override */
		case 0x65:	/* GS override */
			set_seg_override(c, c->b & 7);
			break;
		case 0x40 ... 0x4f: /* REX */
			if (mode != X86EMUL_MODE_PROT64)
				goto done_prefixes;
			c->rex_prefix = c->b;
			continue;
		case 0xf0:	/* LOCK */
			c->lock_prefix = 1;
			break;
		case 0xf2:	/* REPNE/REPNZ */
			c->rep_prefix = REPNE_PREFIX;
			break;
		case 0xf3:	/* REP/REPE/REPZ */
			c->rep_prefix = REPE_PREFIX;
			break;
		default:
			goto done_prefixes;
		}

		/* Any legacy prefix after a REX prefix nullifies its effect. */

		c->rex_prefix = 0;
	}

done_prefixes:

	/* REX prefix. */
	if (c->rex_prefix)
		if (c->rex_prefix & 8)
			c->op_bytes = 8;	/* REX.W */

	/* Opcode byte(s). */
	c->d = opcode_table[c->b];
	if (c->d == 0) {
		/* Two-byte opcode? */
		if (c->b == 0x0f) {
			c->twobyte = 1;
			c->b = insn_fetch(u8, 1, c->eip);
			c->d = twobyte_table[c->b];
		}
	}

	if (c->d & Group) {
		group = c->d & GroupMask;
		c->modrm = insn_fetch(u8, 1, c->eip);
		--c->eip;

		group = (group << 3) + ((c->modrm >> 3) & 7);
		if ((c->d & GroupDual) && (c->modrm >> 6) == 3)
			c->d = group2_table[group];
		else
			c->d = group_table[group];
	}

	/* Unrecognised? */
	if (c->d == 0) {
		DPRINTF("Cannot emulate %02x\n", c->b);
		return -1;
	}

	if (mode == X86EMUL_MODE_PROT64 && (c->d & Stack))
		c->op_bytes = 8;

	/* ModRM and SIB bytes. */
	if (c->d & ModRM)
		rc = decode_modrm(ctxt, ops);
	else if (c->d & MemAbs)
		rc = decode_abs(ctxt, ops);
	if (rc)
		goto done;

	if (!c->has_seg_override)
		set_seg_override(c, VCPU_SREG_DS);

	if (!(!c->twobyte && c->b == 0x8d))
		c->modrm_ea += seg_override_base(ctxt, c);

	if (c->ad_bytes != 8)
		c->modrm_ea = (u32)c->modrm_ea;
	/*
	 * Decode and fetch the source operand: register, memory
	 * or immediate.
	 */
	switch (c->d & SrcMask) {
	case SrcNone:
		break;
	case SrcReg:
		decode_register_operand(&c->src, c, 0);
		break;
	case SrcMem16:
		c->src.bytes = 2;
		goto srcmem_common;
	case SrcMem32:
		c->src.bytes = 4;
		goto srcmem_common;
	case SrcMem:
		c->src.bytes = (c->d & ByteOp) ? 1 :
							   c->op_bytes;
		/* Don't fetch the address for invlpg: it could be unmapped. */
		if (c->twobyte && c->b == 0x01 && c->modrm_reg == 7)
			break;
	srcmem_common:
		/*
		 * For instructions with a ModR/M byte, switch to register
		 * access if Mod = 3.
		 */
		if ((c->d & ModRM) && c->modrm_mod == 3) {
			c->src.type = OP_REG;
			c->src.val = c->modrm_val;
			c->src.ptr = c->modrm_ptr;
			break;
		}
		c->src.type = OP_MEM;
		break;
	case SrcImm:
		c->src.type = OP_IMM;
		c->src.ptr = (unsigned long *)c->eip;
		c->src.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
		if (c->src.bytes == 8)
			c->src.bytes = 4;
		/* NB. Immediates are sign-extended as necessary. */
		switch (c->src.bytes) {
		case 1:
			c->src.val = insn_fetch(s8, 1, c->eip);
			break;
		case 2:
			c->src.val = insn_fetch(s16, 2, c->eip);
			break;
		case 4:
			c->src.val = insn_fetch(s32, 4, c->eip);
			break;
		}
		break;
	case SrcImmByte:
		c->src.type = OP_IMM;
		c->src.ptr = (unsigned long *)c->eip;
		c->src.bytes = 1;
		c->src.val = insn_fetch(s8, 1, c->eip);
		break;
	}

	/* Decode and fetch the destination operand: register or memory. */
	switch (c->d & DstMask) {
	case ImplicitOps:
		/* Special instructions do their own operand decoding. */
		return 0;
	case DstReg:
		decode_register_operand(&c->dst, c,
			 c->twobyte && (c->b == 0xb6 || c->b == 0xb7));
		break;
	case DstMem:
		if ((c->d & ModRM) && c->modrm_mod == 3) {
			c->dst.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
			c->dst.type = OP_REG;
			c->dst.val = c->dst.orig_val = c->modrm_val;
			c->dst.ptr = c->modrm_ptr;
			break;
		}
		c->dst.type = OP_MEM;
		break;
	}

	if (c->rip_relative)
		c->modrm_ea += c->eip;

done:
	return (rc == X86EMUL_UNHANDLEABLE) ? -1 : 0;
}

static inline void emulate_push(struct x86_emulate_ctxt *ctxt)
{
	struct decode_cache *c = &ctxt->decode;

	c->dst.type  = OP_MEM;
	c->dst.bytes = c->op_bytes;
	c->dst.val = c->src.val;
	register_address_increment(c, &c->regs[VCPU_REGS_RSP], -c->op_bytes);
	c->dst.ptr = (void *) register_address(c, ss_base(ctxt),
					       c->regs[VCPU_REGS_RSP]);
}

static inline int emulate_grp1a(struct x86_emulate_ctxt *ctxt,
				struct x86_emulate_ops *ops)
{
	struct decode_cache *c = &ctxt->decode;
	int rc;

	rc = ops->read_std(register_address(c, ss_base(ctxt),
					    c->regs[VCPU_REGS_RSP]),
			   &c->dst.val, c->dst.bytes, ctxt->vcpu);
	if (rc != 0)
		return rc;

	register_address_increment(c, &c->regs[VCPU_REGS_RSP], c->dst.bytes);

	return 0;
}

static inline void emulate_grp2(struct x86_emulate_ctxt *ctxt)
{
	struct decode_cache *c = &ctxt->decode;
	switch (c->modrm_reg) {
	case 0:	/* rol */
		emulate_2op_SrcB("rol", c->src, c->dst, ctxt->eflags);
		break;
	case 1:	/* ror */
		emulate_2op_SrcB("ror", c->src, c->dst, ctxt->eflags);
		break;
	case 2:	/* rcl */
		emulate_2op_SrcB("rcl", c->src, c->dst, ctxt->eflags);
		break;
	case 3:	/* rcr */
		emulate_2op_SrcB("rcr", c->src, c->dst, ctxt->eflags);
		break;
	case 4:	/* sal/shl */
	case 6:	/* sal/shl */
		emulate_2op_SrcB("sal", c->src, c->dst, ctxt->eflags);
		break;
	case 5:	/* shr */
		emulate_2op_SrcB("shr", c->src, c->dst, ctxt->eflags);
		break;
	case 7:	/* sar */
		emulate_2op_SrcB("sar", c->src, c->dst, ctxt->eflags);
		break;
	}
}

static inline int emulate_grp3(struct x86_emulate_ctxt *ctxt,
			       struct x86_emulate_ops *ops)
{
	struct decode_cache *c = &ctxt->decode;
	int rc = 0;

	switch (c->modrm_reg) {
	case 0 ... 1:	/* test */
		emulate_2op_SrcV("test", c->src, c->dst, ctxt->eflags);
		break;
	case 2:	/* not */
		c->dst.val = ~c->dst.val;
		break;
	case 3:	/* neg */
		emulate_1op("neg", c->dst, ctxt->eflags);
		break;
	default:
		DPRINTF("Cannot emulate %02x\n", c->b);
		rc = X86EMUL_UNHANDLEABLE;
		break;
	}
	return rc;
}

static inline int emulate_grp45(struct x86_emulate_ctxt *ctxt,
			       struct x86_emulate_ops *ops)
{
	struct decode_cache *c = &ctxt->decode;

	switch (c->modrm_reg) {
	case 0:	/* inc */
		emulate_1op("inc", c->dst, ctxt->eflags);
		break;
	case 1:	/* dec */
		emulate_1op("dec", c->dst, ctxt->eflags);
		break;
	case 4: /* jmp abs */
		c->eip = c->src.val;
		break;
	case 6:	/* push */
		emulate_push(ctxt);
		break;
	}
	return 0;
}

static inline int emulate_grp9(struct x86_emulate_ctxt *ctxt,
			       struct x86_emulate_ops *ops,
			       unsigned long memop)
{
	struct decode_cache *c = &ctxt->decode;
	u64 old, new;
	int rc;

	rc = ops->read_emulated(memop, &old, 8, ctxt->vcpu);
	if (rc != 0)
		return rc;

	if (((u32) (old >> 0) != (u32) c->regs[VCPU_REGS_RAX]) ||
	    ((u32) (old >> 32) != (u32) c->regs[VCPU_REGS_RDX])) {

		c->regs[VCPU_REGS_RAX] = (u32) (old >> 0);
		c->regs[VCPU_REGS_RDX] = (u32) (old >> 32);
		ctxt->eflags &= ~EFLG_ZF;

	} else {
		new = ((u64)c->regs[VCPU_REGS_RCX] << 32) |
		       (u32) c->regs[VCPU_REGS_RBX];

		rc = ops->cmpxchg_emulated(memop, &old, &new, 8, ctxt->vcpu);
		if (rc != 0)
			return rc;
		ctxt->eflags |= EFLG_ZF;
	}
	return 0;
}
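
/*
 * Illustrative note (added commentary): emulate_grp9() implements
 * cmpxchg8b.  If the 64-bit value at 'memop' differs from EDX:EAX, the old
 * value is loaded into EDX:EAX and ZF is cleared; otherwise ECX:EBX is
 * written back through ops->cmpxchg_emulated() and ZF is set, matching the
 * architected behaviour.
 */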

static inline int writeback(struct x86_emulate_ctxt *ctxt,
			    struct x86_emulate_ops *ops)
{
	int rc;
	struct decode_cache *c = &ctxt->decode;

	switch (c->dst.type) {
	case OP_REG:
		/* The 4-byte case *is* correct:
		 * in 64-bit mode we zero-extend.
		 */
		switch (c->dst.bytes) {
		case 1:
			*(u8 *)c->dst.ptr = (u8)c->dst.val;
			break;
		case 2:
			*(u16 *)c->dst.ptr = (u16)c->dst.val;
			break;
		case 4:
			*c->dst.ptr = (u32)c->dst.val;
			break;	/* 64b: zero-ext */
		case 8:
			*c->dst.ptr = c->dst.val;
			break;
		}
		break;
	case OP_MEM:
		if (c->lock_prefix)
			rc = ops->cmpxchg_emulated(
					(unsigned long)c->dst.ptr,
					&c->dst.orig_val,
					&c->dst.val,
					c->dst.bytes,
					ctxt->vcpu);
		else
			rc = ops->write_emulated(
					(unsigned long)c->dst.ptr,
					&c->dst.val,
					c->dst.bytes,
					ctxt->vcpu);
		if (rc != 0)
			return rc;
		break;
	case OP_NONE:
		/* no writeback */
		break;
	default:
		break;
	}
	return 0;
}

int
x86_emulate_insn(struct x86_emulate_ctxt *ctxt, struct x86_emulate_ops *ops)
{
	unsigned long memop = 0;
	u64 msr_data;
	unsigned long saved_eip = 0;
	struct decode_cache *c = &ctxt->decode;
	int rc = 0;

	/* Shadow copy of register state. Committed on successful emulation.
	 * NOTE: we can copy them from vcpu as x86_decode_insn() doesn't
	 * modify them.
	 */

	memcpy(c->regs, ctxt->vcpu->arch.regs, sizeof c->regs);
	saved_eip = c->eip;

	if (((c->d & ModRM) && (c->modrm_mod != 3)) || (c->d & MemAbs))
		memop = c->modrm_ea;

	if (c->rep_prefix && (c->d & String)) {
		/* All REP prefixes have the same first termination condition */
		if (c->regs[VCPU_REGS_RCX] == 0) {
			kvm_rip_write(ctxt->vcpu, c->eip);
			goto done;
		}
		/* The second termination condition only applies for REPE
		 * and REPNE. Test if the repeat string operation prefix is
		 * REPE/REPZ or REPNE/REPNZ and if it's the case it tests the
		 * corresponding termination condition according to:
		 * 	- if REPE/REPZ and ZF = 0 then done
		 * 	- if REPNE/REPNZ and ZF = 1 then done
		 */
		if ((c->b == 0xa6) || (c->b == 0xa7) ||
				(c->b == 0xae) || (c->b == 0xaf)) {
			if ((c->rep_prefix == REPE_PREFIX) &&
				((ctxt->eflags & EFLG_ZF) == 0)) {
					kvm_rip_write(ctxt->vcpu, c->eip);
					goto done;
			}
			if ((c->rep_prefix == REPNE_PREFIX) &&
				((ctxt->eflags & EFLG_ZF) == EFLG_ZF)) {
				kvm_rip_write(ctxt->vcpu, c->eip);
				goto done;
			}
		}
		c->regs[VCPU_REGS_RCX]--;
		c->eip = kvm_rip_read(ctxt->vcpu);
	}

	if (c->src.type == OP_MEM) {
		c->src.ptr = (unsigned long *)memop;
		c->src.val = 0;
		rc = ops->read_emulated((unsigned long)c->src.ptr,
					&c->src.val,
					c->src.bytes,
					ctxt->vcpu);
		if (rc != 0)
			goto done;
		c->src.orig_val = c->src.val;
	}

	if ((c->d & DstMask) == ImplicitOps)
		goto special_insn;


	if (c->dst.type == OP_MEM) {
		c->dst.ptr = (unsigned long *)memop;
		c->dst.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
		c->dst.val = 0;
		if (c->d & BitOp) {
			unsigned long mask = ~(c->dst.bytes * 8 - 1);

			c->dst.ptr = (void *)c->dst.ptr +
						   (c->src.val & mask) / 8;
		}
		if (!(c->d & Mov) &&
				   /* optimisation - avoid slow emulated read */
		    ((rc = ops->read_emulated((unsigned long)c->dst.ptr,
					   &c->dst.val,
					  c->dst.bytes, ctxt->vcpu)) != 0))
			goto done;
	}
	c->dst.orig_val = c->dst.val;

special_insn:

	if (c->twobyte)
		goto twobyte_insn;

	switch (c->b) {
	case 0x00 ... 0x05:
	      add:		/* add */
		emulate_2op_SrcV("add", c->src, c->dst, ctxt->eflags);
		break;
	case 0x08 ... 0x0d:
	      or:		/* or */
		emulate_2op_SrcV("or", c->src, c->dst, ctxt->eflags);
		break;
	case 0x10 ... 0x15:
	      adc:		/* adc */
		emulate_2op_SrcV("adc", c->src, c->dst, ctxt->eflags);
		break;
	case 0x18 ... 0x1d:
	      sbb:		/* sbb */
		emulate_2op_SrcV("sbb", c->src, c->dst, ctxt->eflags);
		break;
	case 0x20 ... 0x23:
	      and:		/* and */
		emulate_2op_SrcV("and", c->src, c->dst, ctxt->eflags);
		break;
	case 0x24:              /* and al imm8 */
		c->dst.type = OP_REG;
		c->dst.ptr = &c->regs[VCPU_REGS_RAX];
		c->dst.val = *(u8 *)c->dst.ptr;
		c->dst.bytes = 1;
		c->dst.orig_val = c->dst.val;
		goto and;
	case 0x25:              /* and ax imm16, or eax imm32 */
		c->dst.type = OP_REG;
		c->dst.bytes = c->op_bytes;
		c->dst.ptr = &c->regs[VCPU_REGS_RAX];
		if (c->op_bytes == 2)
			c->dst.val = *(u16 *)c->dst.ptr;
		else
			c->dst.val = *(u32 *)c->dst.ptr;
		c->dst.orig_val = c->dst.val;
		goto and;
	case 0x28 ... 0x2d:
	      sub:		/* sub */
		emulate_2op_SrcV("sub", c->src, c->dst, ctxt->eflags);
		break;
	case 0x30 ... 0x35:
	      xor:		/* xor */
		emulate_2op_SrcV("xor", c->src, c->dst, ctxt->eflags);
		break;
	case 0x38 ... 0x3d:
	      cmp:		/* cmp */
		emulate_2op_SrcV("cmp", c->src, c->dst, ctxt->eflags);
		break;
	case 0x40 ... 0x47: /* inc r16/r32 */
		emulate_1op("inc", c->dst, ctxt->eflags);
		break;
	case 0x48 ... 0x4f: /* dec r16/r32 */
		emulate_1op("dec", c->dst, ctxt->eflags);
		break;
	case 0x50 ... 0x57:  /* push reg */
		c->dst.type  = OP_MEM;
		c->dst.bytes = c->op_bytes;
		c->dst.val = c->src.val;
		register_address_increment(c, &c->regs[VCPU_REGS_RSP],
					   -c->op_bytes);
		c->dst.ptr = (void *) register_address(
			c, ss_base(ctxt), c->regs[VCPU_REGS_RSP]);
		break;
	case 0x58 ... 0x5f: /* pop reg */
	pop_instruction:
		if ((rc = ops->read_std(register_address(c, ss_base(ctxt),
			c->regs[VCPU_REGS_RSP]), c->dst.ptr,
			c->op_bytes, ctxt->vcpu)) != 0)
			goto done;

		register_address_increment(c, &c->regs[VCPU_REGS_RSP],
					   c->op_bytes);
		c->dst.type = OP_NONE;	/* Disable writeback. */
		break;
	case 0x63:		/* movsxd */
		if (ctxt->mode != X86EMUL_MODE_PROT64)
			goto cannot_emulate;
		c->dst.val = (s32) c->src.val;
		break;
	case 0x68: /* push imm */
	case 0x6a: /* push imm8 */
		emulate_push(ctxt);
		break;
	case 0x6c:		/* insb */
	case 0x6d:		/* insw/insd */
		 if (kvm_emulate_pio_string(ctxt->vcpu, NULL,
				1,
				(c->d & ByteOp) ? 1 : c->op_bytes,
				c->rep_prefix ?
				address_mask(c, c->regs[VCPU_REGS_RCX]) : 1,
				(ctxt->eflags & EFLG_DF),
				register_address(c, es_base(ctxt),
						 c->regs[VCPU_REGS_RDI]),
				c->rep_prefix,
				c->regs[VCPU_REGS_RDX]) == 0) {
			c->eip = saved_eip;
			return -1;
		}
		return 0;
	case 0x6e:		/* outsb */
	case 0x6f:		/* outsw/outsd */
		if (kvm_emulate_pio_string(ctxt->vcpu, NULL,
				0,
				(c->d & ByteOp) ? 1 : c->op_bytes,
				c->rep_prefix ?
				address_mask(c, c->regs[VCPU_REGS_RCX]) : 1,
				(ctxt->eflags & EFLG_DF),
					 register_address(c,
					  seg_override_base(ctxt, c),
						 c->regs[VCPU_REGS_RSI]),
				c->rep_prefix,
				c->regs[VCPU_REGS_RDX]) == 0) {
			c->eip = saved_eip;
			return -1;
		}
		return 0;
	case 0x70 ... 0x7f: /* jcc (short) */ {
		int rel = insn_fetch(s8, 1, c->eip);

		if (test_cc(c->b, ctxt->eflags))
			jmp_rel(c, rel);
		break;
	}
	case 0x80 ... 0x83:	/* Grp1 */
		switch (c->modrm_reg) {
		case 0:
			goto add;
		case 1:
			goto or;
		case 2:
			goto adc;
		case 3:
			goto sbb;
		case 4:
			goto and;
		case 5:
			goto sub;
		case 6:
			goto xor;
		case 7:
			goto cmp;
		}
		break;
	case 0x84 ... 0x85:
		emulate_2op_SrcV("test", c->src, c->dst, ctxt->eflags);
		break;
	case 0x86 ... 0x87:	/* xchg */
	xchg:
		/* Write back the register source. */
		switch (c->dst.bytes) {
		case 1:
			*(u8 *) c->src.ptr = (u8) c->dst.val;
			break;
		case 2:
			*(u16 *) c->src.ptr = (u16) c->dst.val;
			break;
		case 4:
			*c->src.ptr = (u32) c->dst.val;
			break;	/* 64b reg: zero-extend */
		case 8:
			*c->src.ptr = c->dst.val;
			break;
		}
		/*
		 * Write back the memory destination with implicit LOCK
		 * prefix.
		 */
		c->dst.val = c->src.val;
		c->lock_prefix = 1;
		break;
	case 0x88 ... 0x8b:	/* mov */
		goto mov;
	case 0x8c: { /* mov r/m, sreg */
		struct kvm_segment segreg;

		if (c->modrm_reg <= 5)
			kvm_get_segment(ctxt->vcpu, &segreg, c->modrm_reg);
		else {
			printk(KERN_INFO "0x8c: Invalid segreg in modrm byte 0x%02x\n",
			       c->modrm);
			goto cannot_emulate;
		}
		c->dst.val = segreg.selector;
		break;
	}
	case 0x8d: /* lea r16/r32, m */
		c->dst.val = c->modrm_ea;
		break;
	case 0x8e: { /* mov seg, r/m16 */
		uint16_t sel;
		int type_bits;
		int err;

		sel = c->src.val;
		if (c->modrm_reg <= 5) {
			type_bits = (c->modrm_reg == 1) ? 9 : 1;
			err = kvm_load_segment_descriptor(ctxt->vcpu, sel,
							  type_bits, c->modrm_reg);
		} else {
			printk(KERN_INFO "Invalid segreg in modrm byte 0x%02x\n",
					c->modrm);
			goto cannot_emulate;
		}

		if (err < 0)
			goto cannot_emulate;

		c->dst.type = OP_NONE;  /* Disable writeback. */
		break;
	}
	case 0x8f:		/* pop (sole member of Grp1a) */
		rc = emulate_grp1a(ctxt, ops);
		if (rc != 0)
			goto done;
		break;
	case 0x90: /* nop / xchg r8,rax */
		if (!(c->rex_prefix & 1)) { /* nop */
			c->dst.type = OP_NONE;
			break;
		}
	case 0x91 ... 0x97: /* xchg reg,rax */
		c->src.type = c->dst.type = OP_REG;
		c->src.bytes = c->dst.bytes = c->op_bytes;
		c->src.ptr = (unsigned long *) &c->regs[VCPU_REGS_RAX];
		c->src.val = *(c->src.ptr);
		goto xchg;
	case 0x9c: /* pushf */
		c->src.val =  (unsigned long) ctxt->eflags;
		emulate_push(ctxt);
		break;
	case 0x9d: /* popf */
		c->dst.ptr = (unsigned long *) &ctxt->eflags;
		goto pop_instruction;
	case 0xa0 ... 0xa1:	/* mov */
		c->dst.ptr = (unsigned long *)&c->regs[VCPU_REGS_RAX];
		c->dst.val = c->src.val;
		break;
	case 0xa2 ... 0xa3:	/* mov */
		c->dst.val = (unsigned long)c->regs[VCPU_REGS_RAX];
		break;
	case 0xa4 ... 0xa5:	/* movs */
		c->dst.type = OP_MEM;
		c->dst.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
		c->dst.ptr = (unsigned long *)register_address(c,
						   es_base(ctxt),
						   c->regs[VCPU_REGS_RDI]);
		if ((rc = ops->read_emulated(register_address(c,
					   seg_override_base(ctxt, c),
					c->regs[VCPU_REGS_RSI]),
					&c->dst.val,
					c->dst.bytes, ctxt->vcpu)) != 0)
			goto done;
		register_address_increment(c, &c->regs[VCPU_REGS_RSI],
				       (ctxt->eflags & EFLG_DF) ? -c->dst.bytes
							   : c->dst.bytes);
		register_address_increment(c, &c->regs[VCPU_REGS_RDI],
				       (ctxt->eflags & EFLG_DF) ? -c->dst.bytes
							   : c->dst.bytes);
		break;
	case 0xa6 ... 0xa7:	/* cmps */
		c->src.type = OP_NONE; /* Disable writeback. */
		c->src.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
		c->src.ptr = (unsigned long *)register_address(c,
				       seg_override_base(ctxt, c),
						   c->regs[VCPU_REGS_RSI]);
		if ((rc = ops->read_emulated((unsigned long)c->src.ptr,
						&c->src.val,
						c->src.bytes,
						ctxt->vcpu)) != 0)
			goto done;

		c->dst.type = OP_NONE; /* Disable writeback. */
		c->dst.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
		c->dst.ptr = (unsigned long *)register_address(c,
						   es_base(ctxt),
						   c->regs[VCPU_REGS_RDI]);
		if ((rc = ops->read_emulated((unsigned long)c->dst.ptr,
						&c->dst.val,
						c->dst.bytes,
						ctxt->vcpu)) != 0)
			goto done;

		DPRINTF("cmps: mem1=0x%p mem2=0x%p\n", c->src.ptr, c->dst.ptr);

		emulate_2op_SrcV("cmp", c->src, c->dst, ctxt->eflags);

		register_address_increment(c, &c->regs[VCPU_REGS_RSI],
				       (ctxt->eflags & EFLG_DF) ? -c->src.bytes
								  : c->src.bytes);
		register_address_increment(c, &c->regs[VCPU_REGS_RDI],
				       (ctxt->eflags & EFLG_DF) ? -c->dst.bytes
								  : c->dst.bytes);

		break;
A
Avi Kivity 已提交
1641
	case 0xaa ... 0xab:	/* stos */
1642 1643
		c->dst.type = OP_MEM;
		c->dst.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
1644
		c->dst.ptr = (unsigned long *)register_address(c,
1645
						   es_base(ctxt),
1646
						   c->regs[VCPU_REGS_RDI]);
1647
		c->dst.val = c->regs[VCPU_REGS_RAX];
1648
		register_address_increment(c, &c->regs[VCPU_REGS_RDI],
1649
				       (ctxt->eflags & EFLG_DF) ? -c->dst.bytes
1650
							   : c->dst.bytes);
A
Avi Kivity 已提交
1651 1652
		break;
	case 0xac ... 0xad:	/* lods */
		c->dst.type = OP_REG;
		c->dst.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
		c->dst.ptr = (unsigned long *)&c->regs[VCPU_REGS_RAX];
		if ((rc = ops->read_emulated(register_address(c,
						 seg_override_base(ctxt, c),
						 c->regs[VCPU_REGS_RSI]),
						 &c->dst.val,
						 c->dst.bytes,
						 ctxt->vcpu)) != 0)
			goto done;
		register_address_increment(c, &c->regs[VCPU_REGS_RSI],
				       (ctxt->eflags & EFLG_DF) ? -c->dst.bytes
							   : c->dst.bytes);
		break;
	case 0xae ... 0xaf:	/* scas */
		DPRINTF("Urk! I don't handle SCAS.\n");
		goto cannot_emulate;
	case 0xb0 ... 0xbf: /* mov r, imm */
		goto mov;
	case 0xc0 ... 0xc1:
		emulate_grp2(ctxt);
		break;
	case 0xc3: /* ret */
		c->dst.ptr = &c->eip;
		goto pop_instruction;
	case 0xc6 ... 0xc7:	/* mov (sole member of Grp11) */
	mov:
		c->dst.val = c->src.val;
		break;
	case 0xd0 ... 0xd1:	/* Grp2 */
		c->src.val = 1;
		emulate_grp2(ctxt);
		break;
	case 0xd2 ... 0xd3:	/* Grp2 */
		c->src.val = c->regs[VCPU_REGS_RCX];
		emulate_grp2(ctxt);
		break;
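	/*
	 * Near call: the return address (RIP of the following instruction) is
	 * captured before the relative displacement is applied, then pushed.
	 */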
	case 0xe8: /* call (near) */ {
		long int rel;
		switch (c->op_bytes) {
		case 2:
			rel = insn_fetch(s16, 2, c->eip);
			break;
		case 4:
			rel = insn_fetch(s32, 4, c->eip);
			break;
		default:
			DPRINTF("Call: Invalid op_bytes\n");
			goto cannot_emulate;
		}
		c->src.val = (unsigned long) c->eip;
		jmp_rel(c, rel);
		c->op_bytes = c->ad_bytes;
		emulate_push(ctxt);
		break;
	}
	case 0xe9: /* jmp rel */
		goto jmp;
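	/*
	 * Far jump: fetch the new EIP and CS selector, load the CS descriptor
	 * first, and only update EIP once the selector load has succeeded.
	 */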
	case 0xea: /* jmp far */ {
		uint32_t eip;
		uint16_t sel;

		switch (c->op_bytes) {
		case 2:
			eip = insn_fetch(u16, 2, c->eip);
			break;
		case 4:
			eip = insn_fetch(u32, 4, c->eip);
			break;
		default:
			DPRINTF("jmp far: Invalid op_bytes\n");
			goto cannot_emulate;
		}
		sel = insn_fetch(u16, 2, c->eip);
		if (kvm_load_segment_descriptor(ctxt->vcpu, sel, 9, VCPU_SREG_CS) < 0) {
			DPRINTF("jmp far: Failed to load CS descriptor\n");
			goto cannot_emulate;
		}

		c->eip = eip;
		break;
	}
	case 0xeb:
	      jmp:		/* jmp rel short */
		jmp_rel(c, c->src.val);
		c->dst.type = OP_NONE; /* Disable writeback. */
		break;
	case 0xf4:              /* hlt */
		ctxt->vcpu->arch.halt_request = 1;
		break;
	case 0xf5:	/* cmc */
		/* complement carry flag from eflags reg */
		ctxt->eflags ^= EFLG_CF;
		c->dst.type = OP_NONE;	/* Disable writeback. */
		break;
	case 0xf6 ... 0xf7:	/* Grp3 */
		rc = emulate_grp3(ctxt, ops);
		if (rc != 0)
			goto done;
		break;
	case 0xf8: /* clc */
		ctxt->eflags &= ~EFLG_CF;
		c->dst.type = OP_NONE;	/* Disable writeback. */
		break;
	case 0xfa: /* cli */
		ctxt->eflags &= ~X86_EFLAGS_IF;
		c->dst.type = OP_NONE;	/* Disable writeback. */
		break;
	case 0xfb: /* sti */
		ctxt->eflags |= X86_EFLAGS_IF;
		c->dst.type = OP_NONE;	/* Disable writeback. */
		break;
	case 0xfe ... 0xff:	/* Grp4/Grp5 */
		rc = emulate_grp45(ctxt, ops);
		if (rc != 0)
			goto done;
		break;
	}

writeback:
	rc = writeback(ctxt, ops);
	if (rc != 0)
		goto done;

	/* Commit shadow register state. */
	memcpy(ctxt->vcpu->arch.regs, c->regs, sizeof c->regs);
	kvm_rip_write(ctxt->vcpu, c->eip);

done:
	if (rc == X86EMUL_UNHANDLEABLE) {
		c->eip = saved_eip;
		return -1;
	}
	return 0;

twobyte_insn:
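	/* Two-byte (0x0f-prefixed) opcodes are handled from here on. */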
	switch (c->b) {
	case 0x01: /* lgdt, lidt, lmsw */
		switch (c->modrm_reg) {
			u16 size;
			unsigned long address;

		case 0: /* vmcall */
			if (c->modrm_mod != 3 || c->modrm_rm != 1)
				goto cannot_emulate;

			rc = kvm_fix_hypercall(ctxt->vcpu);
			if (rc)
				goto done;

			/* Let the processor re-execute the fixed hypercall */
			c->eip = kvm_rip_read(ctxt->vcpu);
			/* Disable writeback. */
			c->dst.type = OP_NONE;
			break;
		case 2: /* lgdt */
			rc = read_descriptor(ctxt, ops, c->src.ptr,
					     &size, &address, c->op_bytes);
			if (rc)
				goto done;
			realmode_lgdt(ctxt->vcpu, size, address);
			/* Disable writeback. */
			c->dst.type = OP_NONE;
			break;
		case 3: /* lidt/vmmcall */
			if (c->modrm_mod == 3 && c->modrm_rm == 1) {
				rc = kvm_fix_hypercall(ctxt->vcpu);
				if (rc)
					goto done;
				kvm_emulate_hypercall(ctxt->vcpu);
			} else {
				rc = read_descriptor(ctxt, ops, c->src.ptr,
						     &size, &address,
						     c->op_bytes);
				if (rc)
					goto done;
				realmode_lidt(ctxt->vcpu, size, address);
			}
			/* Disable writeback. */
			c->dst.type = OP_NONE;
			break;
		case 4: /* smsw */
			c->dst.bytes = 2;
			c->dst.val = realmode_get_cr(ctxt->vcpu, 0);
			break;
		case 6: /* lmsw */
			realmode_lmsw(ctxt->vcpu, (u16)c->src.val,
				      &ctxt->eflags);
			c->dst.type = OP_NONE;
			break;
		case 7: /* invlpg*/
			emulate_invlpg(ctxt->vcpu, memop);
			/* Disable writeback. */
			c->dst.type = OP_NONE;
			break;
		default:
			goto cannot_emulate;
		}
		break;
	case 0x06:
		emulate_clts(ctxt->vcpu);
		c->dst.type = OP_NONE;
		break;
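	/*
	 * invd, wbinvd and the prefetch hints have no architecturally visible
	 * effect here, so they are treated as NOPs with writeback disabled.
	 */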
	case 0x08:		/* invd */
	case 0x09:		/* wbinvd */
	case 0x0d:		/* GrpP (prefetch) */
	case 0x18:		/* Grp16 (prefetch/nop) */
		c->dst.type = OP_NONE;
		break;
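	/*
	 * Moves to and from control/debug registers only accept a register
	 * operand, so anything other than mod == 3 cannot be emulated.
	 */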
	case 0x20: /* mov cr, reg */
		if (c->modrm_mod != 3)
			goto cannot_emulate;
		c->regs[c->modrm_rm] =
				realmode_get_cr(ctxt->vcpu, c->modrm_reg);
		c->dst.type = OP_NONE;	/* no writeback */
		break;
	case 0x21: /* mov from dr to reg */
		if (c->modrm_mod != 3)
			goto cannot_emulate;
		rc = emulator_get_dr(ctxt, c->modrm_reg, &c->regs[c->modrm_rm]);
		if (rc)
			goto cannot_emulate;
		c->dst.type = OP_NONE;	/* no writeback */
		break;
	case 0x22: /* mov reg, cr */
		if (c->modrm_mod != 3)
			goto cannot_emulate;
		realmode_set_cr(ctxt->vcpu,
				c->modrm_reg, c->modrm_val, &ctxt->eflags);
		c->dst.type = OP_NONE;
		break;
	case 0x23: /* mov from reg to dr */
		if (c->modrm_mod != 3)
			goto cannot_emulate;
		rc = emulator_set_dr(ctxt, c->modrm_reg,
				     c->regs[c->modrm_rm]);
		if (rc)
			goto cannot_emulate;
		c->dst.type = OP_NONE;	/* no writeback */
		break;
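	/*
	 * wrmsr/rdmsr pass the 64-bit value in EDX:EAX and the MSR index in
	 * ECX; a failed access injects #GP(0) into the guest.
	 */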
	case 0x30:
		/* wrmsr */
		msr_data = (u32)c->regs[VCPU_REGS_RAX]
			| ((u64)c->regs[VCPU_REGS_RDX] << 32);
		rc = kvm_set_msr(ctxt->vcpu, c->regs[VCPU_REGS_RCX], msr_data);
		if (rc) {
			kvm_inject_gp(ctxt->vcpu, 0);
			c->eip = kvm_rip_read(ctxt->vcpu);
		}
		rc = X86EMUL_CONTINUE;
		c->dst.type = OP_NONE;
		break;
	case 0x32:
		/* rdmsr */
		rc = kvm_get_msr(ctxt->vcpu, c->regs[VCPU_REGS_RCX], &msr_data);
		if (rc) {
			kvm_inject_gp(ctxt->vcpu, 0);
			c->eip = kvm_rip_read(ctxt->vcpu);
		} else {
			c->regs[VCPU_REGS_RAX] = (u32)msr_data;
			c->regs[VCPU_REGS_RDX] = msr_data >> 32;
		}
		rc = X86EMUL_CONTINUE;
		c->dst.type = OP_NONE;
		break;
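	/*
	 * cmov always reads the source; when the condition is false the
	 * destination write is simply suppressed.
	 */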
	case 0x40 ... 0x4f:	/* cmov */
		c->dst.val = c->dst.orig_val = c->src.val;
		if (!test_cc(c->b, ctxt->eflags))
			c->dst.type = OP_NONE; /* no writeback */
		break;
	case 0x80 ... 0x8f: /* jnz rel, etc*/ {
		long int rel;

		switch (c->op_bytes) {
		case 2:
			rel = insn_fetch(s16, 2, c->eip);
			break;
		case 4:
			rel = insn_fetch(s32, 4, c->eip);
			break;
		case 8:
			rel = insn_fetch(s64, 8, c->eip);
			break;
		default:
			DPRINTF("jnz: Invalid op_bytes\n");
			goto cannot_emulate;
		}
		if (test_cc(c->b, ctxt->eflags))
			jmp_rel(c, rel);
		c->dst.type = OP_NONE;
		break;
	}
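	/*
	 * Bit-test family (bt/bts/btr/btc): the bit offset in the source is
	 * reduced modulo the destination operand width before the operation.
	 */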
	case 0xa3:
	      bt:		/* bt */
		c->dst.type = OP_NONE;
		/* only subword offset */
		c->src.val &= (c->dst.bytes << 3) - 1;
		emulate_2op_SrcV_nobyte("bt", c->src, c->dst, ctxt->eflags);
		break;
	case 0xab:
	      bts:		/* bts */
		/* only subword offset */
		c->src.val &= (c->dst.bytes << 3) - 1;
		emulate_2op_SrcV_nobyte("bts", c->src, c->dst, ctxt->eflags);
		break;
	case 0xae:              /* clflush */
		break;
	case 0xb0 ... 0xb1:	/* cmpxchg */
		/*
		 * Save real source value, then compare EAX against
		 * destination.
		 */
		c->src.orig_val = c->src.val;
		c->src.val = c->regs[VCPU_REGS_RAX];
		emulate_2op_SrcV("cmp", c->src, c->dst, ctxt->eflags);
		if (ctxt->eflags & EFLG_ZF) {
			/* Success: write back to memory. */
			c->dst.val = c->src.orig_val;
		} else {
			/* Failure: write the value we saw to EAX. */
			c->dst.type = OP_REG;
			c->dst.ptr = (unsigned long *)&c->regs[VCPU_REGS_RAX];
		}
		break;
	case 0xb3:
	      btr:		/* btr */
		/* only subword offset */
		c->src.val &= (c->dst.bytes << 3) - 1;
		emulate_2op_SrcV_nobyte("btr", c->src, c->dst, ctxt->eflags);
		break;
	case 0xb6 ... 0xb7:	/* movzx */
		c->dst.bytes = c->op_bytes;
		c->dst.val = (c->d & ByteOp) ? (u8) c->src.val
						       : (u16) c->src.val;
		break;
	case 0xba:		/* Grp8 */
		switch (c->modrm_reg & 3) {
		case 0:
			goto bt;
		case 1:
			goto bts;
		case 2:
			goto btr;
		case 3:
			goto btc;
		}
		break;
	case 0xbb:
	      btc:		/* btc */
		/* only subword offset */
		c->src.val &= (c->dst.bytes << 3) - 1;
		emulate_2op_SrcV_nobyte("btc", c->src, c->dst, ctxt->eflags);
		break;
	case 0xbe ... 0xbf:	/* movsx */
		c->dst.bytes = c->op_bytes;
		c->dst.val = (c->d & ByteOp) ? (s8) c->src.val :
							(s16) c->src.val;
		break;
	case 0xc3:		/* movnti */
		c->dst.bytes = c->op_bytes;
		c->dst.val = (c->op_bytes == 4) ? (u32) c->src.val :
							(u64) c->src.val;
		break;
	case 0xc7:		/* Grp9 (cmpxchg8b) */
		rc = emulate_grp9(ctxt, ops, memop);
		if (rc != 0)
			goto done;
		c->dst.type = OP_NONE;
		break;
	}
	goto writeback;

cannot_emulate:
	DPRINTF("Cannot emulate %02x\n", c->b);
	c->eip = saved_eip;
	return -1;
}