/*
 * FILE:        ntoskrnl/ke/i386/v86m_sup.S
 * COPYRIGHT:   See COPYING in the top level directory
 * PURPOSE:     Virtual 8086 (V86) Mode Support
 * PROGRAMMER:  Alex Ionescu (alex@relsoft.net)
 * NOTE:        See asmmacro.S for the V86 trap code.
 */

/* INCLUDES ******************************************************************/

#include <asm.h>
#include <internal/i386/asmmacro.S>
.intel_syntax noprefix

/* FIXME: Can we make a nice macro to generate V86 Opcode handlers? */

/* GLOBALS *******************************************************************/

//
// This table contains indexes into the OpcodeDispatchV86 Table for opcodes in
// Virtual-8086 Mode.
// There are 256 entries.
//
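//
// Note: INVALID_V86_OPCODE n (see asmmacro.S) is expected to emit n zero
// bytes, i.e. n consecutive entries that map to index 0 (_OpcodeInvalidV86).
// The opcode ranges in the comments below are hexadecimal.
//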
OpcodeIndex:
    INVALID_V86_OPCODE 15      /* OP 00-0E: UNHANDLED    */
    .byte 1                    /* OP 0F: 0F              */
    INVALID_V86_OPCODE 22      /* OP 10-25: UNHANDLED    */
    .byte 2                    /* OP 26: ES Prefix       */
    INVALID_V86_OPCODE 7       /* OP 27-2D: UNHANDLED    */
    .byte 3                    /* OP 2E: CS Prefix       */
    INVALID_V86_OPCODE 7       /* OP 2F-35: UNHANDLED    */
    .byte 4                    /* OP 36: SS Prefix       */
    INVALID_V86_OPCODE 7       /* OP 37-3D: UNHANDLED    */
    .byte 5                    /* OP 3E: DS Prefix       */
    INVALID_V86_OPCODE 37      /* OP 3F-63: UNHANDLED    */
    .byte 6                    /* OP 64: FS Prefix       */
    .byte 7                    /* OP 65: GS Prefix       */
    .byte 8                    /* OP 66: OPER32 Prefix   */
    .byte 9                    /* OP 67: ADDR32 Prefix   */
    INVALID_V86_OPCODE 4       /* OP 68-6B: UNHANDLED    */
    .byte 10                   /* OP 6C: INSB            */
    .byte 11                   /* OP 6D: INSW            */
    .byte 12                   /* OP 6E: OUTSB           */
    .byte 13                   /* OP 6F: OUTSW           */
    INVALID_V86_OPCODE 43      /* OP 70-9A: UNHANDLED    */
    .byte 19                   /* OP 9B: NPX             */
    .byte 14                   /* OP 9C: PUSHF           */
    .byte 15                   /* OP 9D: POPF            */
    INVALID_V86_OPCODE 47      /* OP 9E-CC: UNHANDLED    */
    .byte 16                   /* OP CD: INTnn           */
    .byte 17                   /* OP CE: INTO            */
    .byte 18                   /* OP CF: IRETD           */
    INVALID_V86_OPCODE 8       /* OP D0-D7: UNHANDLED    */
    .byte 19                   /* OP D8: NPX             */
    .byte 19                   /* OP D9: NPX             */
    .byte 19                   /* OP DA: NPX             */
    .byte 19                   /* OP DB: NPX             */
    .byte 19                   /* OP DC: NPX             */
    .byte 19                   /* OP DD: NPX             */
    .byte 19                   /* OP DE: NPX             */
    .byte 19                   /* OP DF: NPX             */
    INVALID_V86_OPCODE 4       /* OP E0-E3: UNHANDLED    */
    .byte 20                   /* OP E4: INBimm          */
    .byte 21                   /* OP E5: INWimm          */
    .byte 22                   /* OP E6: OUTBimm         */
    .byte 23                   /* OP E7: OUTWimm         */
    INVALID_V86_OPCODE 4       /* OP E8-EB: UNHANDLED    */
    .byte 24                   /* OP EC: INB             */
    .byte 25                   /* OP ED: INW             */
    .byte 26                   /* OP EE: OUTB            */
    .byte 27                   /* OP EF: OUTW            */
    .byte 28                   /* OP F0: LOCK Prefix     */
    .byte 0                    /* OP F1: UNHANDLED       */
    .byte 29                   /* OP F2: REPNE Prefix    */
    .byte 30                   /* OP F3: REP Prefix      */
    .byte 33                   /* OP F4: HLT             */
    INVALID_V86_OPCODE 5       /* OP F5-F9: UNHANDLED    */
    .byte 31                   /* OP FA: CLI             */
    .byte 32                   /* OP FB: STI             */
    INVALID_V86_OPCODE 4       /* OP FC-FF: UNHANDLED    */

//
// This table contains the emulation routines for
// Virtual-8086 Mode. There are 34 entries.
//
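//
// The order of these entries defines the index values used in the
// OpcodeIndex table above (0 = invalid, 1 = 0F, ..., 33 = HLT).
//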
OpcodeDispatchV86:
    .long _OpcodeInvalidV86
    .long _Opcode0FV86
    .long _OpcodeESPrefixV86
    .long _OpcodeCSPrefixV86
    .long _OpcodeSSPrefixV86
    .long _OpcodeDSPrefixV86
    .long _OpcodeFSPrefixV86
    .long _OpcodeGSPrefixV86
    .long _OpcodeOPER32PrefixV86
    .long _OpcodeADDR32PrefixV86
    .long _OpcodeINSBV86
    .long _OpcodeINSWV86
    .long _OpcodeOUTSBV86
    .long _OpcodeOUTSWV86
    .long _OpcodePUSHFV86
    .long _OpcodePOPFV86
    .long _OpcodeINTnnV86
    .long _OpcodeINTOV86
    .long _OpcodeIRETV86
    .long _OpcodeNPXV86
    .long _OpcodeINBimmV86
    .long _OpcodeINWimmV86
    .long _OpcodeOUTBimmV86
    .long _OpcodeOUTWimmV86
    .long _OpcodeINBV86
    .long _OpcodeINWV86
    .long _OpcodeOUTBV86
    .long _OpcodeOUTWV86
    .long _OpcodeLOCKPrefixV86
    .long _OpcodeREPNEPrefixV86
    .long _OpcodeREPPrefixV86
    .long _OpcodeCLIV86
    .long _OpcodeSTIV86
    .long _OpcodeHLTV86

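//
// Per-handler dispatch counters (34 zero-initialized dwords), referenced by
// the commented-out accounting line in Ki386HandleOpcodeV86 below.
//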
_ExVdmOpcodeDispatchCounts:
    .rept 34
    .long 0
    .endr

V86DebugMsg:
    .asciz "Received V86 Emulation Opcode: %lx\n"

/* VIRTUAL-8086 MODE OPCODE HANDLERS *****************************************/

.func OpcodeInvalidV86
_OpcodeInvalidV86:
    UNHANDLED_V86_OPCODE
.endfunc

.func Opcode0FV86
_Opcode0FV86:
    UNHANDLED_V86_OPCODE
.endfunc

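/*
 * LOCK is undefined here, presumably so that the GENERATE_PREFIX_HANDLER LOCK
 * invocation below sees it as a plain handler-name argument rather than an
 * expanded macro; it is redefined as the real lock prefix right afterwards.
 */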
#undef LOCK

GENERATE_PREFIX_HANDLER ES
GENERATE_PREFIX_HANDLER CS
GENERATE_PREFIX_HANDLER DS
GENERATE_PREFIX_HANDLER FS
GENERATE_PREFIX_HANDLER GS
GENERATE_PREFIX_HANDLER SS
GENERATE_PREFIX_HANDLER OPER32
GENERATE_PREFIX_HANDLER ADDR32
GENERATE_PREFIX_HANDLER LOCK
GENERATE_PREFIX_HANDLER REP
GENERATE_PREFIX_HANDLER REPNE

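/*
 * On SMP builds the LOCK token expands to the x86 lock prefix so that the
 * read-modify-write updates of the shared NTVDM state dword below are atomic;
 * on UP builds it expands to nothing.
 */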
#ifdef CONFIG_SMP
#define LOCK lock
#else
#define LOCK
#endif

.func OpcodeINSBV86
_OpcodeINSBV86:
    UNHANDLED_V86_OPCODE
.endfunc

.func OpcodeINSWV86
_OpcodeINSWV86:
    UNHANDLED_V86_OPCODE
.endfunc

.func OpcodeOUTSBV86
_OpcodeOUTSBV86:
    UNHANDLED_V86_OPCODE
.endfunc

.func OpcodeOUTSWV86
_OpcodeOUTSWV86:
    UNHANDLED_V86_OPCODE
.endfunc

.func OpcodePUSHFV86
_OpcodePUSHFV86:

    /* Get VDM state */
    mov eax, FIXED_NTVDMSTATE_LINEAR_PC_AT
    mov eax, [eax]

    /* Get EFLAGS and mask out IF */
    mov edx, [ebp+KTRAP_FRAME_EFLAGS]
    and edx, ~EFLAGS_INTERRUPT_MASK

    /* Mask in the align check, nested task and interrupt flags from the VDM state */
    and eax, EFLAGS_ALIGN_CHECK + EFLAGS_NESTED_TASK + EFLAGS_INTERRUPT_MASK
    or eax, edx

    /* Add IOPL Mask */
    or eax, EFLAGS_IOPL

    /* Get flat ESP */
    movzx ecx, word ptr [ebp+KTRAP_FRAME_SS]
    shl ecx, 4
    movzx edx, word ptr [ebp+KTRAP_FRAME_ESP]
    sub dx, 2
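    /* Real-mode stack address is SS * 16 + SP; e.g. SS=0x1234, SP=0xFFFE
       gives 0x12340 + 0xFFFC = 0x2233C after the 2-byte push */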

    /* Check if there is an OPER32 prefix */
    test ebx, PREFIX_FLAG_OPER32
    jnz SkipPrefix

    /* Push EFLAGS */
    mov [ecx+edx], ax

UpdateFrame:

    /* Update ESP and EIP */
    mov [ebp+KTRAP_FRAME_ESP], dx
    add [ebp+KTRAP_FRAME_EIP], edi

    /* Return success */
    mov eax, 1
    ret

SkipPrefix:

    /* OPER32 prefix: push the full 32-bit EFLAGS and jump back */
    sub dx, 2
    mov [edx+ecx], eax
    jmp UpdateFrame
.endfunc

.func OpcodePOPFV86
_OpcodePOPFV86:

    /* Get VDM state */
    mov eax, FIXED_NTVDMSTATE_LINEAR_PC_AT

    /* Get flat ESP */
    mov ecx, [ebp+KTRAP_FRAME_SS]
    shl ecx, 4
    movzx edx, word ptr [ebp+KTRAP_FRAME_ESP]

    /* Pop EFLAGS */
    mov ecx, [ecx+edx]
    add edx, 4

    /* Check for OPER32 prefix */
    test ebx, PREFIX_FLAG_OPER32
    jnz NoPrefix

    /* No prefix: only a 16-bit POPF, so keep the low word and pop 2 bytes */
    and ecx, 0xFFFF
    sub edx, 2

NoPrefix:

    /* Set new ESP */
    mov [ebp+KTRAP_FRAME_ESP], edx

    /* Mask out EFLAGS: EBX becomes the new trap frame flags, ECX the bits
       that are reflected into the VDM state */
    and ecx, ~EFLAGS_IOPL
    mov ebx, ecx
    and ebx, ~EFLAGS_NESTED_TASK
    and ecx, EFLAGS_ALIGN_CHECK + EFLAGS_NESTED_TASK + EFLAGS_INTERRUPT_MASK

    /* FIXME: Support VME */

    /* Save VDM State pointer */
    push eax

    /* Set new EFLAGS, make sure to add IF and V86 */
    or ebx, EFLAGS_INTERRUPT_MASK + EFLAGS_V86_MASK
    push [ebp+KTRAP_FRAME_EFLAGS]
    mov [ebp+KTRAP_FRAME_EFLAGS], ebx

    /* Make sure we were in V86 mode */
    test ebx, EFLAGS_V86_MASK
    jnz CheckEspAdjust
    int 3

CheckEspAdjust:

    /* Check if we have to update ESP0 and fixup the stack from our push */
    test dword ptr [ebp+KTRAP_FRAME_EFLAGS], EFLAGS_V86_MASK
    lea esp, [esp+4]
    jnz NoAdjustEsp0

    /* Adjust it */
    push ebp
    call _Ki386AdjustEsp0@4

NoAdjustEsp0:

    /* Restore VDM state */
    pop eax

    /* Update the flags in the VDM State */
    LOCK and dword ptr [eax], ~(EFLAGS_ALIGN_CHECK + EFLAGS_NESTED_TASK + EFLAGS_INTERRUPT_MASK)
    LOCK or [eax], ecx

    /* Update EIP */
    add [ebp+KTRAP_FRAME_EIP], edi

    /* FIXME: Check for VDM Pending interrupts */

    /* Return success */
    mov eax, 1
    ret
.endfunc

.func OpcodeINTnnV86
_OpcodeINTnnV86:
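
    /*
     * Emulates a real-mode INT nn: the current FLAGS, CS and IP are pushed
     * on the V86 stack, IF is cleared in the virtual VDM state, and CS:IP is
     * reloaded from the 4-byte IVT entry at linear address vector * 4.
     */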

    /* Get EFlags */
    mov edx, [ebp+KTRAP_FRAME_EFLAGS]

    /* Remove the flag in the VDM State */
    mov eax, FIXED_NTVDMSTATE_LINEAR_PC_AT
    mov ecx, [eax]
    LOCK and dword ptr [eax], ~EFLAGS_INTERRUPT_MASK

    /* Mask it out from EFLAGS too */
    mov eax, edx
    and eax, ~EFLAGS_INTERRUPT_MASK

    /* Keep only the alignment check and IF flags from the VDM state */
    and ecx, EFLAGS_ALIGN_CHECK + EFLAGS_INTERRUPT_MASK

    /* FIXME: Support VME */

    /* Now mask out VIF, NT and TF */
    or eax, ecx
    and edx, ~(EFLAGS_VIF + EFLAGS_NESTED_TASK + EFLAGS_TF)
    mov [ebp+KTRAP_FRAME_EFLAGS], edx

    /* Set the IOPL Mask */
    or eax, EFLAGS_IOPL

    /* Get stack flat address */
    movzx ecx, word ptr [ebp+KTRAP_FRAME_SS]
    shl ecx, 4
    movzx edx, word ptr [ebp+KTRAP_FRAME_ESP]

    /* Push EFLAGS */
    sub dx, 2
    mov word ptr [ecx+edx], ax

    /* Push CS */
    mov ax, word ptr [ebp+KTRAP_FRAME_CS]
    sub dx, 2
    mov word ptr [ecx+edx], ax

    /* Push IP */
    movzx eax, word ptr [ebp+KTRAP_FRAME_EIP]
    add eax, edi
    inc eax
    sub dx, 2
    mov word ptr [ecx+edx], ax

    /* Update ESP */
    mov [ebp+KTRAP_FRAME_ESP], dx

    /* Get the interrupt vector number (the byte following the CD opcode) */
    inc esi
    movzx ecx, byte ptr [esi]
    /* FIXME: Analyze and see if this is a hooked VDM (PM) Interrupt */

    /* Get the entry in the IVT */
    mov ebx, [ecx*4]
    mov eax, ebx
    shr eax, 16
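    /* Each IVT entry is IP:CS; BX now holds the new IP, AX the new CS */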

    /* Update EIP */
    mov word ptr [ebp+KTRAP_FRAME_EIP], bx

    /* Check if this was V86 mode */
    test dword ptr [ebp+KTRAP_FRAME_EFLAGS], EFLAGS_V86_MASK
    jnz SetCs

    /* Check if it was a kernel CS */
    or ax, RPL_MASK
    cmp ax, KGDT_R0_CODE
    jnb SetCs

    /* Set user-mode CS */
    mov ax, KGDT_R3_CODE + RPL_MASK

SetCs:
    /* Set new CS */
    mov [ebp+KTRAP_FRAME_CS], ax

    /* Return success */
    mov eax, 1
    ret
.endfunc

.func OpcodeINTOV86
_OpcodeINTOV86:
    UNHANDLED_V86_OPCODE
.endfunc

.func OpcodeIRETV86
_OpcodeIRETV86:
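
    /*
     * Emulates a real-mode IRET: IP, CS and FLAGS are popped from the V86
     * stack (as three words, or as three dwords when an OPER32 prefix was
     * seen), the virtual IF in the VDM state is updated, and the instruction
     * at the new CS:IP is checked for the NTVDM BOP escape sequence.
     */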

    /* Get the VDM State */
    mov eax, FIXED_NTVDMSTATE_LINEAR_PC_AT

    /* Get flat ESP */
    movzx ecx, word ptr [ebp+KTRAP_FRAME_SS]
    shl ecx, 4
    movzx edx, word ptr [ebp+KTRAP_FRAME_ESP]
    add ecx, edx

    /* Check for OPER32 prefix */
    test ebx, PREFIX_FLAG_OPER32
    jnz Iret32

    /* Get flat IP */
    movzx edi, word ptr [ecx]
    mov [ebp+KTRAP_FRAME_EIP], edi
    movzx esi, word ptr [ecx+2]
    mov [ebp+KTRAP_FRAME_CS], esi

    /* Update ESP */
    add edx, 6
    mov [ebp+KTRAP_FRAME_ESP], edx

    /* Get EFLAGS */
    movzx ebx, word ptr [ecx+4]

MaskEFlags:

    /* Mask out EFLAGS */
    and ebx, ~(EFLAGS_IOPL + EFLAGS_VIF + EFLAGS_NESTED_TASK + EFLAGS_VIP)
    mov ecx, ebx

    /* FIXME: Check for VME support */

    /* Save VDM state */
    push eax

    /* Enable V86 and Interrupts */
    or ebx, EFLAGS_V86_MASK + EFLAGS_INTERRUPT_MASK

    /* Save old flags */
    mov eax, [ebp+KTRAP_FRAME_EFLAGS]
    push eax

    /* Keep the old VIP bit and set the new eflags */
    and eax, EFLAGS_VIP
    or eax, ebx
    mov [ebp+KTRAP_FRAME_EFLAGS], eax

    /* Check if we have to update ESP0 */
    pop ebx
    test ebx, EFLAGS_V86_MASK
    jnz NoEsp0Update

    /* Save ECX and EDX */
    push ecx
    push edx

    /* Update esp0 and restore registers */
    push ebp
    call _Ki386AdjustEsp0@4
    pop edx
    pop ecx

NoEsp0Update:

    /* Put VDM state in EAX and update VDM EFlags */
    pop eax
    and ecx, EFLAGS_INTERRUPT_MASK
    LOCK and dword ptr [eax], ~EFLAGS_INTERRUPT_MASK
    LOCK or [eax], ecx

    /* Get flat EIP and check if this is the BOP */
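    /* 0xC4C4 (two LES opcodes, invalid in 16-bit code) is assumed here to be
       the NTVDM BOP escape that requests host-side emulation */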
    shl esi, 4
    add esi, edi
    mov ax, [esi]
    cmp ax, 0xC4C4
    jz IsBop

    /* FIXME: Check for VDM interrupts */

    /* Return success */
RetIret:
    mov eax, 1
    ret

IsBop:

    /* Call the BOP handler */
    push ebp
    call _VdmDispatchBop@4
    jmp RetIret

Iret32:

    /* Get 32-bit flat EIP */
    mov edi, [ecx]
    mov [ebp+KTRAP_FRAME_EIP], edi
    movzx esi, word ptr [ecx+4]
    mov [ebp+KTRAP_FRAME_CS], esi

    /* Set new ESP */
    add edx, 12
    mov [ebp+KTRAP_FRAME_ESP], edx

    /* Get EFLAGS and continue */
    mov ebx, [ecx+8]
    jmp MaskEFlags
.endfunc

.func OpcodeNPXV86
_OpcodeNPXV86:
    UNHANDLED_V86_OPCODE
.endfunc

.func OpcodeINBimmV86
_OpcodeINBimmV86:
    UNHANDLED_V86_OPCODE
.endfunc

.func OpcodeINWimmV86
_OpcodeINWimmV86:
    UNHANDLED_V86_OPCODE
.endfunc

.func OpcodeOUTBimmV86
_OpcodeOUTBimmV86:
    UNHANDLED_V86_OPCODE
.endfunc

.func OpcodeOUTWimmV86
_OpcodeOUTWimmV86:
    UNHANDLED_V86_OPCODE
.endfunc

.func OpcodeINBV86
_OpcodeINBV86:
    UNHANDLED_V86_OPCODE
.endfunc

.func OpcodeINWV86
_OpcodeINWV86:
    UNHANDLED_V86_OPCODE
.endfunc

.func OpcodeOUTBV86
_OpcodeOUTBV86:
    UNHANDLED_V86_OPCODE
.endfunc

.func OpcodeOUTWV86
_OpcodeOUTWV86:
    UNHANDLED_V86_OPCODE
.endfunc

.func OpcodeCLIV86
_OpcodeCLIV86:

    /* Get VDM State */
    mov eax, FIXED_NTVDMSTATE_LINEAR_PC_AT

    /* FIXME: Support VME */

    /* FIXME: Support VDM Interrupts */

    /* Disable interrupts */
    LOCK and dword ptr [eax], ~EFLAGS_INTERRUPT_MASK

    /* Update EIP (remember EDI == instruction size) */
    add [ebp+KTRAP_FRAME_EIP], edi

    /* Return success */
    mov eax, 1
    ret
.endfunc

.func OpcodeSTIV86
_OpcodeSTIV86:

    /* Get VDM State */
    mov eax, FIXED_NTVDMSTATE_LINEAR_PC_AT

    /* FIXME: Support VME */

    /* Enable interrupts */
    LOCK or dword ptr [eax], EFLAGS_INTERRUPT_MASK

    /* Update EIP (remember EDI == instruction size) */
    add [ebp+KTRAP_FRAME_EIP], edi

    /* FIXME: Support VDM Interrupts */

    /* Return success */
    mov eax, 1
    ret
.endfunc

.func OpcodeGenericPrefixV86
_OpcodeGenericPrefixV86:
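
    /*
     * Common tail for the GENERATE_PREFIX_HANDLER stubs: by the time we get
     * here the corresponding PREFIX_FLAG_* bit is presumably already set in
     * EBX, so just consume the prefix byte and dispatch the next opcode.
     */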

    /* Skip the prefix byte */
    inc esi
    inc edi

    /* Get the instruction */
    movzx ecx, byte ptr [esi]

    /* Get the opcode index */
    movzx edx, byte ptr OpcodeIndex[ecx]

    /* Dispatch it */
    jmp OpcodeDispatchV86[edx*4]
.endfunc

.func OpcodeHLTV86
_OpcodeHLTV86:
    UNHANDLED_V86_OPCODE
.endfunc

/* FUNCTIONS *****************************************************************/

.globl _Ki386SetupAndExitToV86Mode@4
.func Ki386SetupAndExitToV86Mode@4
_Ki386SetupAndExitToV86Mode@4:
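
    /*
     * Builds a fake user-mode trap frame on the current kernel stack, saves
     * the real initial stack and TEB pointers, switches to the TEB address
     * passed in as the argument, asks NTVDM to start execution via
     * NtVdmControl and then exits to V86 mode through the common trap exit
     * path. Control returns at Ki386BiosCallReturnAddress below.
     */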

    /* Save nonvolatiles */
    push ebp
    push ebx
    push esi
    push edi

    /* Give us a little stack */
    sub esp, 12
    mov ecx, esp
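
    /* This 12-byte scratch area holds the saved initial stack pointer at
       [ecx], the thread's real TEB at [ecx+4] and the PCR TEB at [ecx+8];
       they are consumed again in Ki386BiosCallReturnAddress */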

    /* Go past the KTRAP_FRAME and NPX Frame and set a new frame in EAX */
    sub esp, NPX_FRAME_LENGTH
    and esp, ~15
    sub esp, KTRAP_FRAME_LENGTH
    mov eax, esp

    /* Create a fake user-mode frame */
    mov dword ptr [eax+KTRAP_FRAME_CS], KGDT_R0_CODE + RPL_MASK
    mov dword ptr [eax+KTRAP_FRAME_ES], 0
    mov dword ptr [eax+KTRAP_FRAME_DS], 0
    mov dword ptr [eax+KTRAP_FRAME_FS], 0
    mov dword ptr [eax+KTRAP_FRAME_GS], 0
    mov dword ptr [eax+KTRAP_FRAME_ERROR_CODE], 0

    /* Get the current thread's initial stack */
    mov ebx, [fs:KPCR_SELF]
    mov edi, [ebx+KPCR_CURRENT_THREAD]
    mov edx, [edi+KTHREAD_INITIAL_STACK]
    sub edx, NPX_FRAME_LENGTH

    /* Save it on our stack, as well as the real TEB addresses */
    mov [ecx], edx
    mov edx, [edi+KTHREAD_TEB]
    mov [ecx+4], edx
    mov edx, [fs:KPCR_TEB]
    mov [ecx+8], edx

    /* Set our ESP in ESI, and the return function in EIP */
    mov edi, offset _Ki386BiosCallReturnAddress
    mov [eax+KTRAP_FRAME_ESI], ecx
    mov [eax+KTRAP_FRAME_EIP], edi

    /* Push the flags and sanitize them */
    pushfd
    pop edi
    and edi, 0x60DD7
    or edi, EFLAGS_INTERRUPT_MASK

    /* Set SS and ESP, and fill out the rest of the frame */
    mov dword ptr [eax+KTRAP_FRAME_SS], KGDT_R3_DATA + RPL_MASK
    mov dword ptr [eax+KTRAP_FRAME_ESP], 0x11FFE
    mov dword ptr [eax+KTRAP_FRAME_EFLAGS], edi
    mov dword ptr [eax+KTRAP_FRAME_EXCEPTION_LIST], -1
    mov dword ptr [eax+KTRAP_FRAME_PREVIOUS_MODE], -1
    mov dword ptr [eax+KTRAP_FRAME_DR7], 0
    mov dword ptr [eax+KTRAP_FRAME_DEBUGARGMARK], 0xBADB0D00

    /* Jump past the frame now */
    add eax, KTRAP_FRAME_LENGTH
    cli

    /* Save the current stack */
    push ecx

    /* Get the current thread's initial stack again */
    mov edi, [ebx+KPCR_CURRENT_THREAD]
    mov esi, [edi+KTHREAD_INITIAL_STACK]
    sub esi, NPX_FRAME_LENGTH

    /* Set the size of the copy, and the destination, and copy the NPX frame */
    mov ecx, NPX_FRAME_LENGTH / 4
    mov edi, eax
    rep movsd

    /* Restore stack */
    pop ecx

    /* Get the current thread and TSS */
    mov edi, [ebx+KPCR_CURRENT_THREAD]
    mov esi, [ebx+KPCR_TSS]

    /* Bias the V86 frame */
    sub eax, KTRAP_FRAME_V86_GS - KTRAP_FRAME_SS

    /* Set exception list and new ESP */
    mov dword ptr [ebx+KPCR_EXCEPTION_LIST], -1
    mov [esi+KTSS_ESP0], eax

    /* Now skip past the NPX frame and V86 fields and set this as the initial stack */
    add eax, NPX_FRAME_LENGTH + (KTRAP_FRAME_V86_GS - KTRAP_FRAME_SS)
    mov [edi+KTHREAD_INITIAL_STACK], eax

    /* Set up our fake TEB pointer (the caller-supplied TEB address) */
    mov eax, [ecx+0x20]
    mov [fs:KPCR_TEB], eax
    mov [edi+KTHREAD_TEB], eax

    /* Setup the descriptors for the fake TEB */
    mov ebx, [fs:KPCR_GDT]
    mov [ebx+0x3A], ax
    shr eax, 16
    mov [ebx+0x3C], al
    mov [ebx+0x3F], ah
    sti

    /*
     * Start VDM execution. This will save this fake 32-bit KTRAP_FRAME and
     * initialize a real 16-bit VDM context frame
     */
    push 0
    push 0 // VdmStartExecution
    call _NtVdmControl@8

    /* Exit to V86 mode */
    mov ebp, esp
    jmp _Kei386EoiHelper@0
.endfunc

.globl _Ki386BiosCallReturnAddress
.func Ki386BiosCallReturnAddress
_Ki386BiosCallReturnAddress:
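
    /*
     * Execution resumes here when the VDM call completes (this address was
     * stored as the EIP of the fake trap frame above): copy the NPX frame
     * back, restore the initial stack, TSS ESP0 and the real TEB pointers
     * that were saved on the scratch stack, then return to the caller of
     * Ki386SetupAndExitToV86Mode.
     */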

    /* Get the PCR */
    mov eax, [fs:KPCR_SELF]

    /* Get NPX destination */
    mov edi, [ebp+KTRAP_FRAME_ESI]
    mov edi, [edi]

    /* Get initial stack */
    mov ecx, [eax+KPCR_CURRENT_THREAD]
    mov esi, [ecx+KTHREAD_INITIAL_STACK]
    sub esi, NPX_FRAME_LENGTH

    /* Set length and copy the NPX frame */
    mov ecx, NPX_FRAME_LENGTH / 4
    rep movsd

    /* Restore stack */
    mov esp, [ebp+KTRAP_FRAME_ESI]
    add esp, 4

    /* Set initial stack */
    mov ecx, [eax+KPCR_CURRENT_THREAD]
    mov [ecx+KTHREAD_INITIAL_STACK], edi

    /* Get TSS and set the ESP 0 */
    mov eax, [eax+KPCR_TSS]
    sub edi, NPX_FRAME_LENGTH + (KTRAP_FRAME_V86_GS - KTRAP_FRAME_SS)
    mov [eax+KTSS_ESP0], edi

    /* Restore KTHREAD TEB from EDX */
    pop edx
    mov [ecx+KTHREAD_TEB], edx

    /* Restore PCR TEB from EDX */
    pop edx
    mov [fs:KPCR_TEB], edx

    /* Setup the descriptors for the real TEB */
    mov ebx, [fs:KPCR_GDT]
    mov [ebx+0x3A], dx
    shr edx, 16
    mov [ebx+0x3C], dl
    mov [ebx+0x3F], dh

    /* Enable interrupts and pop back non-volatiles */
    sti
    pop edi
    pop esi
    pop ebx
    pop ebp
    ret 4
.endfunc

.globl _Ki386HandleOpcodeV86@0
.func Ki386HandleOpcodeV86@0
_Ki386HandleOpcodeV86@0:
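
    /*
     * Entered with EBP pointing at the trap frame. ESI is set to the flat
     * address of the faulting instruction, EDI to the number of instruction
     * bytes consumed so far (1, the opcode itself), and EBX to the
     * accumulated prefix flags; each handler returns with EAX = 1 on success.
     */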

    /* Get flat EIP */
    mov esi, [ebp+KTRAP_FRAME_CS]
    shl esi, 4
    add esi, [ebp+KTRAP_FRAME_EIP]

    /* Get the opcode entry in the table */
    movzx ecx, byte ptr [esi]
    movzx edx, byte ptr OpcodeIndex[ecx]

    /* Set instruction length and prefix flags */
    mov edi, 1
    xor ebx, ebx

    /* Accounting statistics */
    // inc dword ptr _ExVdmOpcodeDispatchCounts[edx*4] // FIXME: Generates protection fault

    /* Handle the opcode */
    jmp OpcodeDispatchV86[edx*4]
.endfunc