kernel pagefault context switching bug
dgruss opened this issue · 9 comments
fc06a8c reveals a bug in this method:
https://github.com/IAIK/sweb/blob/master/arch/x86/32/common/source/ArchInterrupts.cpp#L119
extern "C" void arch_saveThreadRegisters(uint32 error)
{
register struct context_switch_registers* registers;
registers = (struct context_switch_registers*) (&error + 2);
register struct interrupt_registers* iregisters;
iregisters = (struct interrupt_registers*) (&error + 2 + sizeof(struct context_switch_registers)/sizeof(uint32) + (error));
register ArchThreadInfo* info = currentThreadInfo;
asm("fnsave (%[fpu])\n"
"frstor (%[fpu])\n"
:
: [fpu]"r"(&info->fpu));
if (iregisters->cs & 0x3)
{
info->ss = iregisters->ss3;
info->esp = iregisters->esp3;
}
else
{
info->esp = registers->esp + 0xc;
}
info->eip = iregisters->eip;
info->cs = iregisters->cs;
info->eflags = iregisters->eflags;
info->eax = registers->eax;
info->ecx = registers->ecx;
info->edx = registers->edx;
info->ebx = registers->ebx;
info->ebp = registers->ebp;
info->esi = registers->esi;
info->edi = registers->edi;
info->ds = registers->ds;
info->es = registers->es;
assert(!currentThread || currentThread->stack_[0] == STACK_CANARY);
}
When a pagefault is provoked in the kernel, the pagefault handler does not return correctly; as a result, crashes or stack corruption (or sometimes no visible error at all) might occur...
git bisect showed that the bug was introduced in 38b450e
for comparison, the old code which was probably working correctly is:
extern g_tss;
;;;;;; this one is different from the mona approach
;;;;;; they have one global pointer to the current thread
;;;;;; we however have two of them, one to the thread
;;;;;; and one to the thread info
;;;;;; This is way more robust against changes in the thread class
extern currentThreadInfo
extern blubbabla;
global arch_saveThreadRegisters
arch_saveThreadRegisters:
; mov eax, dword[currentThreadInfo]
; mov ebx, dword[eax + 0 ] ; ArchThreadInfo
mov ebx, dword[currentThreadInfo]
fnsave [ebx + 80]
frstor [ebx + 80]
mov eax, dword[esp + 48] ; get cs
and eax, 0x03 ; check cpl is 3
cmp eax, 0x03
je from_user
from_kernel:
mov eax, dword [esp + 44]; save eip
mov dword[ebx], eax
mov eax, dword [esp + 48]; save cs
mov dword[ebx + 4], eax
mov eax, dword [esp + 52]; save eflags
mov dword[ebx + 8], eax
mov eax, dword [esp + 40]; save eax
mov dword[ebx + 12], eax
mov eax, dword [esp + 36]; save ecx
mov dword[ebx + 16], eax
mov eax, dword [esp + 32]; save edx
mov dword[ebx + 20], eax
mov eax, dword [esp + 28]; save ebx
mov dword[ebx + 24], eax
mov eax, dword [esp + 24]; save esp
add eax, 0xc
mov dword[ebx + 28], eax
mov eax, dword [esp + 20]; save ebp
mov dword[ebx + 32], eax
mov eax, dword [esp + 16]; save esi
mov dword[ebx + 36], eax
mov eax, dword [esp + 12]; save edi
mov dword[ebx + 40], eax
mov eax, [esp + 8] ; save ds
mov dword[ebx + 44], eax
mov eax, [esp + 4] ; save es
mov dword[ebx + 48], eax
ret
from_user:
mov eax, dword[esp + 60] ; save ss3
mov dword[ebx + 60], eax
mov eax, dword[esp + 56] ; save esp3
mov dword[ebx + 28], eax
mov eax, dword [esp + 44]; save eip
mov dword[ebx], eax
mov eax, dword [esp + 48]; save cs
mov dword[ebx + 4], eax
mov eax, dword [esp + 52]; save eflags
mov dword[ebx + 8], eax
mov eax, dword [esp + 40]; save eax
mov dword[ebx + 12], eax
mov eax, dword [esp + 36]; save ecx
mov dword[ebx + 16], eax
mov eax, dword [esp + 32]; save edx
mov dword[ebx + 20], eax
mov eax, dword [esp + 28]; save ebx
mov dword[ebx + 24], eax
mov eax, dword [esp + 20]; save ebp
mov dword[ebx + 32], eax
mov eax, dword [esp + 16]; save esi
mov dword[ebx + 36], eax
mov eax, dword [esp + 12]; save edi
mov dword[ebx + 40], eax
mov eax, [esp + 8] ; save ds
mov dword[ebx + 44], eax
mov eax, [esp + 4] ; save es
mov dword[ebx + 48], eax
ret
I also never liked this part:
registers = (struct context_switch_registers*) (&error + 2);
register struct interrupt_registers* iregisters;
iregisters = (struct interrupt_registers*) (&error + 2 + sizeof(struct context_switch_registers)/sizeof(uint32) + (error));
The `&error + 2` pointer arithmetic is really ugly...
new version, as generated by g++-4.9.1 on my system:
80011166: 55 push %ebp
80011167: 89 e5 mov %esp,%ebp
80011169: 57 push %edi
8001116a: 56 push %esi
8001116b: 53 push %ebx
8001116c: 83 ec 0c sub $0xc,%esp
register struct context_switch_registers* registers;
registers = (struct context_switch_registers*) (&error + 2);
8001116f: 8d 75 10 lea 0x10(%ebp),%esi
register struct interrupt_registers* iregisters;
iregisters = (struct interrupt_registers*) (&error + 2 + sizeof(struct context_switch_registers)/sizeof(uint32) + (error));
80011172: ba 0a 00 00 00 mov $0xa,%edx
80011177: 8b 45 08 mov 0x8(%ebp),%eax
8001117a: 01 d0 add %edx,%eax
8001117c: 83 c0 02 add $0x2,%eax
8001117f: c1 e0 02 shl $0x2,%eax
80011182: 8d 4d 08 lea 0x8(%ebp),%ecx
80011185: 8d 3c 01 lea (%ecx,%eax,1),%edi
register ArchThreadInfo* info = currentThreadInfo;
80011188: 8b 1d 0c 40 04 80 mov 0x8004400c,%ebx
asm("fnsave (%[fpu])\n"
"frstor (%[fpu])\n"
:
: [fpu]"r"(&info->fpu));
8001118e: 8d 43 50 lea 0x50(%ebx),%eax
80011191: dd 30 fnsave (%eax)
80011193: dd 20 frstor (%eax)
if (iregisters->cs & 0x3)
80011195: 8b 47 04 mov 0x4(%edi),%eax
80011198: 83 e0 03 and $0x3,%eax
8001119b: 85 c0 test %eax,%eax
8001119d: 74 0e je 800111ad <arch_saveThreadRegisters+0x47>
{
info->ss = iregisters->ss3;
8001119f: 8b 47 10 mov 0x10(%edi),%eax
800111a2: 89 43 3c mov %eax,0x3c(%ebx)
info->esp = iregisters->esp3;
800111a5: 8b 47 0c mov 0xc(%edi),%eax
800111a8: 89 43 1c mov %eax,0x1c(%ebx)
800111ab: eb 09 jmp 800111b6 <arch_saveThreadRegisters+0x50>
}
else
{
info->esp = registers->esp + 0xc;
800111ad: 8b 46 14 mov 0x14(%esi),%eax
800111b0: 83 c0 0c add $0xc,%eax
800111b3: 89 43 1c mov %eax,0x1c(%ebx)
}
info->eip = iregisters->eip;
800111b6: 8b 07 mov (%edi),%eax
800111b8: 89 03 mov %eax,(%ebx)
info->cs = iregisters->cs;
800111ba: 8b 47 04 mov 0x4(%edi),%eax
800111bd: 89 43 04 mov %eax,0x4(%ebx)
info->eflags = iregisters->eflags;
800111c0: 8b 47 08 mov 0x8(%edi),%eax
800111c3: 89 43 08 mov %eax,0x8(%ebx)
info->eax = registers->eax;
800111c6: 8b 46 24 mov 0x24(%esi),%eax
800111c9: 89 43 0c mov %eax,0xc(%ebx)
info->ecx = registers->ecx;
800111cc: 8b 46 20 mov 0x20(%esi),%eax
800111cf: 89 43 10 mov %eax,0x10(%ebx)
info->edx = registers->edx;
800111d2: 8b 46 1c mov 0x1c(%esi),%eax
800111d5: 89 43 14 mov %eax,0x14(%ebx)
info->ebx = registers->ebx;
800111d8: 8b 46 18 mov 0x18(%esi),%eax
800111db: 89 43 18 mov %eax,0x18(%ebx)
info->ebp = registers->ebp;
800111de: 8b 46 10 mov 0x10(%esi),%eax
800111e1: 89 43 20 mov %eax,0x20(%ebx)
info->esi = registers->esi;
800111e4: 8b 46 0c mov 0xc(%esi),%eax
800111e7: 89 43 24 mov %eax,0x24(%ebx)
info->edi = registers->edi;
800111ea: 8b 46 08 mov 0x8(%esi),%eax
800111ed: 89 43 28 mov %eax,0x28(%ebx)
info->ds = registers->ds;
800111f0: 8b 46 04 mov 0x4(%esi),%eax
800111f3: 89 43 2c mov %eax,0x2c(%ebx)
info->es = registers->es;
800111f6: 8b 06 mov (%esi),%eax
800111f8: 89 43 30 mov %eax,0x30(%ebx)
assert(!currentThread || currentThread->stack_[0] == STACK_CANARY);
800111fb: a1 10 40 04 80 mov 0x80044010,%eax
80011200: 85 c0 test %eax,%eax
80011202: 74 26 je 8001122a <arch_saveThreadRegisters+0xc4>
80011204: a1 10 40 04 80 mov 0x80044010,%eax
80011209: 8b 40 0c mov 0xc(%eax),%eax
8001120c: 3d ad de ad de cmp $0xdeaddead,%eax
80011211: 74 17 je 8001122a <arch_saveThreadRegisters+0xc4>
80011213: 83 ec 04 sub $0x4,%esp
80011216: 68 e0 74 02 80 push $0x800274e0
8001121b: 68 97 00 00 00 push $0x97
80011220: 68 1c 75 02 80 push $0x8002751c
80011225: e8 71 c9 ff ff call 8000db9b <sweb_assert>
}
8001122a: 8d 65 f4 lea -0xc(%ebp),%esp
8001122d: 5b pop %ebx
8001122e: 5e pop %esi
8001122f: 5f pop %edi
80011230: 5d pop %ebp
80011231: c3 ret
and the old compiled code:
8001f6f0 <arch_saveThreadRegisters>:
8001f6f0: 8b 1d dc 2b 07 80 mov 0x80072bdc,%ebx
8001f6f6: dd 73 50 fnsave 0x50(%ebx)
8001f6f9: dd 63 50 frstor 0x50(%ebx)
8001f6fc: 8b 44 24 30 mov 0x30(%esp),%eax
8001f700: 83 e0 03 and $0x3,%eax
8001f703: 83 f8 03 cmp $0x3,%eax
8001f706: 74 5e je 8001f766 <from_user>
8001f708 <from_kernel>:
8001f708: 8b 44 24 2c mov 0x2c(%esp),%eax
8001f70c: 89 03 mov %eax,(%ebx)
8001f70e: 8b 44 24 30 mov 0x30(%esp),%eax
8001f712: 89 43 04 mov %eax,0x4(%ebx)
8001f715: 8b 44 24 34 mov 0x34(%esp),%eax
8001f719: 89 43 08 mov %eax,0x8(%ebx)
8001f71c: 8b 44 24 28 mov 0x28(%esp),%eax
8001f720: 89 43 0c mov %eax,0xc(%ebx)
8001f723: 8b 44 24 24 mov 0x24(%esp),%eax
8001f727: 89 43 10 mov %eax,0x10(%ebx)
8001f72a: 8b 44 24 20 mov 0x20(%esp),%eax
8001f72e: 89 43 14 mov %eax,0x14(%ebx)
8001f731: 8b 44 24 1c mov 0x1c(%esp),%eax
8001f735: 89 43 18 mov %eax,0x18(%ebx)
8001f738: 8b 44 24 18 mov 0x18(%esp),%eax
8001f73c: 83 c0 0c add $0xc,%eax
8001f73f: 89 43 1c mov %eax,0x1c(%ebx)
8001f742: 8b 44 24 14 mov 0x14(%esp),%eax
8001f746: 89 43 20 mov %eax,0x20(%ebx)
8001f749: 8b 44 24 10 mov 0x10(%esp),%eax
8001f74d: 89 43 24 mov %eax,0x24(%ebx)
8001f750: 8b 44 24 0c mov 0xc(%esp),%eax
8001f754: 89 43 28 mov %eax,0x28(%ebx)
8001f757: 8b 44 24 08 mov 0x8(%esp),%eax
8001f75b: 89 43 2c mov %eax,0x2c(%ebx)
8001f75e: 8b 44 24 04 mov 0x4(%esp),%eax
8001f762: 89 43 30 mov %eax,0x30(%ebx)
8001f765: c3 ret
8001f766 <from_user>:
8001f766: 8b 44 24 3c mov 0x3c(%esp),%eax
8001f76a: 89 43 3c mov %eax,0x3c(%ebx)
8001f76d: 8b 44 24 38 mov 0x38(%esp),%eax
8001f771: 89 43 1c mov %eax,0x1c(%ebx)
8001f774: 8b 44 24 2c mov 0x2c(%esp),%eax
8001f778: 89 03 mov %eax,(%ebx)
8001f77a: 8b 44 24 30 mov 0x30(%esp),%eax
8001f77e: 89 43 04 mov %eax,0x4(%ebx)
8001f781: 8b 44 24 34 mov 0x34(%esp),%eax
8001f785: 89 43 08 mov %eax,0x8(%ebx)
8001f788: 8b 44 24 28 mov 0x28(%esp),%eax
8001f78c: 89 43 0c mov %eax,0xc(%ebx)
8001f78f: 8b 44 24 24 mov 0x24(%esp),%eax
8001f793: 89 43 10 mov %eax,0x10(%ebx)
8001f796: 8b 44 24 20 mov 0x20(%esp),%eax
8001f79a: 89 43 14 mov %eax,0x14(%ebx)
8001f79d: 8b 44 24 1c mov 0x1c(%esp),%eax
8001f7a1: 89 43 18 mov %eax,0x18(%ebx)
8001f7a4: 8b 44 24 14 mov 0x14(%esp),%eax
8001f7a8: 89 43 20 mov %eax,0x20(%ebx)
8001f7ab: 8b 44 24 10 mov 0x10(%esp),%eax
8001f7af: 89 43 24 mov %eax,0x24(%ebx)
8001f7b2: 8b 44 24 0c mov 0xc(%esp),%eax
8001f7b6: 89 43 28 mov %eax,0x28(%ebx)
8001f7b9: 8b 44 24 08 mov 0x8(%esp),%eax
8001f7bd: 89 43 2c mov %eax,0x2c(%ebx)
8001f7c0: 8b 44 24 04 mov 0x4(%esp),%eax
8001f7c4: 89 43 30 mov %eax,0x30(%ebx)
8001f7c7: c3 ret
8001f7c8 <arch_saveThreadRegistersForPageFault>:
8001f7c8: 8b 1d dc 2b 07 80 mov 0x80072bdc,%ebx
8001f7ce: dd 73 50 fnsave 0x50(%ebx)
8001f7d1: dd 63 50 frstor 0x50(%ebx)
8001f7d4: 8b 44 24 34 mov 0x34(%esp),%eax
8001f7d8: 83 e0 03 and $0x3,%eax
8001f7db: 83 f8 03 cmp $0x3,%eax
8001f7de: 74 5e je 8001f83e <from_user_1>
8001f7e0 <from_kernel_1>:
8001f7e0: 8b 44 24 30 mov 0x30(%esp),%eax
8001f7e4: 89 03 mov %eax,(%ebx)
8001f7e6: 8b 44 24 34 mov 0x34(%esp),%eax
8001f7ea: 89 43 04 mov %eax,0x4(%ebx)
8001f7ed: 8b 44 24 38 mov 0x38(%esp),%eax
8001f7f1: 89 43 08 mov %eax,0x8(%ebx)
8001f7f4: 8b 44 24 28 mov 0x28(%esp),%eax
8001f7f8: 89 43 0c mov %eax,0xc(%ebx)
8001f7fb: 8b 44 24 24 mov 0x24(%esp),%eax
8001f7ff: 89 43 10 mov %eax,0x10(%ebx)
8001f802: 8b 44 24 20 mov 0x20(%esp),%eax
8001f806: 89 43 14 mov %eax,0x14(%ebx)
8001f809: 8b 44 24 1c mov 0x1c(%esp),%eax
8001f80d: 89 43 18 mov %eax,0x18(%ebx)
8001f810: 8b 44 24 18 mov 0x18(%esp),%eax
8001f814: 83 c0 0c add $0xc,%eax
8001f817: 89 43 1c mov %eax,0x1c(%ebx)
8001f81a: 8b 44 24 14 mov 0x14(%esp),%eax
8001f81e: 89 43 20 mov %eax,0x20(%ebx)
8001f821: 8b 44 24 10 mov 0x10(%esp),%eax
8001f825: 89 43 24 mov %eax,0x24(%ebx)
8001f828: 8b 44 24 0c mov 0xc(%esp),%eax
8001f82c: 89 43 28 mov %eax,0x28(%ebx)
8001f82f: 8b 44 24 08 mov 0x8(%esp),%eax
8001f833: 89 43 2c mov %eax,0x2c(%ebx)
8001f836: 8b 44 24 04 mov 0x4(%esp),%eax
8001f83a: 89 43 30 mov %eax,0x30(%ebx)
8001f83d: c3 ret
8001f83e <from_user_1>:
8001f83e: 8b 44 24 40 mov 0x40(%esp),%eax
8001f842: 89 43 3c mov %eax,0x3c(%ebx)
8001f845: 8b 44 24 3c mov 0x3c(%esp),%eax
8001f849: 89 43 1c mov %eax,0x1c(%ebx)
8001f84c: 8b 44 24 30 mov 0x30(%esp),%eax
8001f850: 89 03 mov %eax,(%ebx)
8001f852: 8b 44 24 34 mov 0x34(%esp),%eax
8001f856: 89 43 04 mov %eax,0x4(%ebx)
8001f859: 8b 44 24 38 mov 0x38(%esp),%eax
8001f85d: 89 43 08 mov %eax,0x8(%ebx)
8001f860: 8b 44 24 28 mov 0x28(%esp),%eax
8001f864: 89 43 0c mov %eax,0xc(%ebx)
8001f867: 8b 44 24 24 mov 0x24(%esp),%eax
8001f86b: 89 43 10 mov %eax,0x10(%ebx)
8001f86e: 8b 44 24 20 mov 0x20(%esp),%eax
8001f872: 89 43 14 mov %eax,0x14(%ebx)
8001f875: 8b 44 24 1c mov 0x1c(%esp),%eax
8001f879: 89 43 18 mov %eax,0x18(%ebx)
8001f87c: 8b 44 24 14 mov 0x14(%esp),%eax
8001f880: 89 43 20 mov %eax,0x20(%ebx)
8001f883: 8b 44 24 10 mov 0x10(%esp),%eax
8001f887: 89 43 24 mov %eax,0x24(%ebx)
8001f88a: 8b 44 24 0c mov 0xc(%esp),%eax
8001f88e: 89 43 28 mov %eax,0x28(%ebx)
8001f891: 8b 44 24 08 mov 0x8(%esp),%eax
8001f895: 89 43 2c mov %eax,0x2c(%ebx)
8001f898: 8b 44 24 04 mov 0x4(%esp),%eax
8001f89c: 89 43 30 mov %eax,0x30(%ebx)
8001f89f: c3 ret
Will packaging 4.9 for OS X be necessary to avoid running into this issue?
No... This is not related to specific gcc versions. And the latest commit on master seems to fix the issue
working on this issue in the fix_kpf_bug branch...
with 2e0bc1b it seems to work...
however, we don't want to switch back to asm...
I will next check the functions and the binary for differences between these two versions.
extern "C" void arch_contextSwitch()
{
80011233: 55 push %ebp
80011234: 89 e5 mov %esp,%ebp
80011236: 57 push %edi
80011237: 56 push %esi
80011238: 53 push %ebx
80011239: 81 ec cc 00 00 00 sub $0xcc,%esp
assert(currentThread->stack_[0] == STACK_CANARY);
8001123f: a1 10 40 04 80 mov 0x80044010,%eax
80011244: 8b 40 0c mov 0xc(%eax),%eax
80011247: 3d ad de ad de cmp $0xdeaddead,%eax
8001124c: 74 17 je 80011265 <arch_contextSwitch+0x32>
8001124e: 83 ec 04 sub $0x4,%esp
80011251: 68 20 76 02 80 push $0x80027620
80011256: 68 9e 00 00 00 push $0x9e
8001125b: 68 98 76 02 80 push $0x80027698
80011260: e8 36 c9 ff ff call 8000db9b <sweb_assert>
ArchThreadInfo info = *currentThreadInfo; // optimization: local copy produces more efficient code in this case
80011265: 8b 15 0c 40 04 80 mov 0x8004400c,%edx
8001126b: 8d 85 2c ff ff ff lea -0xd4(%ebp),%eax
80011271: 89 d3 mov %edx,%ebx
80011273: ba 2f 00 00 00 mov $0x2f,%edx
80011278: 89 c7 mov %eax,%edi
8001127a: 89 de mov %ebx,%esi
8001127c: 89 d1 mov %edx,%ecx
8001127e: f3 a5 rep movsl %ds:(%esi),%es:(%edi)
if (currentThread->switch_to_userspace_)
80011280: a1 10 40 04 80 mov 0x80044010,%eax
80011285: 8b 80 0c 20 00 00 mov 0x200c(%eax),%eax
8001128b: 85 c0 test %eax,%eax
8001128d: 74 0e je 8001129d <arch_contextSwitch+0x6a>
{
asm("push %[ss]" : : [ss]"m"(info.ss));
8001128f: ff b5 68 ff ff ff pushl -0x98(%ebp)
asm("push %[esp]" : : [esp]"m"(info.esp));
80011295: ff b5 48 ff ff ff pushl -0xb8(%ebp)
8001129b: eb 06 jmp 800112a3 <arch_contextSwitch+0x70>
}
else
{
asm("mov %[esp], %%esp\n" : : [esp]"m"(info.esp));
8001129d: 8b a5 48 ff ff ff mov -0xb8(%ebp),%esp
}
g_tss->esp0 = info.esp0;
800112a3: a1 40 a0 04 80 mov 0x8004a040,%eax
800112a8: 8b 95 70 ff ff ff mov -0x90(%ebp),%edx
800112ae: 89 50 04 mov %edx,0x4(%eax)
asm("frstor (%[fpu])\n" : : [fpu]"r"(&info.fpu));
800112b1: 8d 85 2c ff ff ff lea -0xd4(%ebp),%eax
800112b7: 83 c0 50 add $0x50,%eax
800112ba: dd 20 frstor (%eax)
asm("mov %[cr3], %%cr3\n" : : [cr3]"r"(info.cr3));
800112bc: 8b 85 78 ff ff ff mov -0x88(%ebp),%eax
800112c2: 0f 22 d8 mov %eax,%cr3
asm("push %[eflags]\n" : : [eflags]"m"(info.eflags));
800112c5: ff b5 34 ff ff ff pushl -0xcc(%ebp)
asm("push %[cs]\n" : : [cs]"m"(info.cs));
800112cb: ff b5 30 ff ff ff pushl -0xd0(%ebp)
asm("push %[eip]\n" : : [eip]"m"(info.eip));
800112d1: ff b5 2c ff ff ff pushl -0xd4(%ebp)
asm("mov %[esi], %%esi\n" : : [esi]"m"(info.esi));
800112d7: 8b b5 50 ff ff ff mov -0xb0(%ebp),%esi
asm("mov %[edi], %%edi\n" : : [edi]"m"(info.edi));
800112dd: 8b bd 54 ff ff ff mov -0xac(%ebp),%edi
asm("mov %[es], %%es\n" : : [es]"m"(info.es));
800112e3: 8e 85 5c ff ff ff mov -0xa4(%ebp),%es
asm("mov %[ds], %%ds\n" : : [ds]"m"(info.ds));
800112e9: 8e 9d 58 ff ff ff mov -0xa8(%ebp),%ds
asm("push %[ebp]\n" : : [ebp]"m"(info.ebp));
800112ef: ff b5 4c ff ff ff pushl -0xb4(%ebp)
asm("pop %%ebp\n"
"iret" : : "a"(info.eax), "b"(info.ebx), "c"(info.ecx), "d"(info.edx));
800112f5: 8b 85 38 ff ff ff mov -0xc8(%ebp),%eax
800112fb: 8b 9d 44 ff ff ff mov -0xbc(%ebp),%ebx
80011301: 8b 8d 3c ff ff ff mov -0xc4(%ebp),%ecx
80011307: 8b 95 40 ff ff ff mov -0xc0(%ebp),%edx
8001130d: 5d pop %ebp
8001130e: cf iret
.global arch_switchThreadToUserPageDirChange
arch_switchThreadToUserPageDirChange:
movl currentThreadInfo,%ebx
8001358c: 8b 1d 0c 40 04 80 mov 0x8004400c,%ebx
frstor 80(%ebx)
80013592: dd 63 50 frstor 0x50(%ebx)
movl g_tss,%ecx # tss
80013595: 8b 0d 40 a0 04 80 mov 0x8004a040,%ecx
movl 68(%ebx),%eax # get esp0
8001359b: 8b 43 44 mov 0x44(%ebx),%eax
movl %eax,4(%ecx) # restore esp0
8001359e: 89 41 04 mov %eax,0x4(%ecx)
movl 76(%ebx),%eax # page directory
800135a1: 8b 43 4c mov 0x4c(%ebx),%eax
movl %eax, %cr3 # change page directory
800135a4: 0f 22 d8 mov %eax,%cr3
movl 12(%ebx),%eax # restore eax
800135a7: 8b 43 0c mov 0xc(%ebx),%eax
movl 16(%ebx),%ecx # restore ecx
800135aa: 8b 4b 10 mov 0x10(%ebx),%ecx
movl 20(%ebx),%edx # restore edx
800135ad: 8b 53 14 mov 0x14(%ebx),%edx
#mov esp, [ebx + 28] ; restore esp
movl 32(%ebx),%ebp # restore ebp
800135b0: 8b 6b 20 mov 0x20(%ebx),%ebp
movl 36(%ebx),%esi # restore esi
800135b3: 8b 73 24 mov 0x24(%ebx),%esi
movl 40(%ebx),%edi # restore edi
800135b6: 8b 7b 28 mov 0x28(%ebx),%edi
movw 48(%ebx),%es # restore es
800135b9: 8e 43 30 mov 0x30(%ebx),%es
movw 44(%ebx),%ds # restore ds
800135bc: 8e 5b 2c mov 0x2c(%ebx),%ds
pushl 60(%ebx)
800135bf: ff 73 3c pushl 0x3c(%ebx)
pushl 28(%ebx)
800135c2: ff 73 1c pushl 0x1c(%ebx)
pushl 8(%ebx)
800135c5: ff 73 08 pushl 0x8(%ebx)
pushl 4(%ebx)
800135c8: ff 73 04 pushl 0x4(%ebx)
pushl 0(%ebx)
800135cb: ff 33 pushl (%ebx)
pushl 24(%ebx)
800135cd: ff 73 18 pushl 0x18(%ebx)
popl %ebx # restore ebp
800135d0: 5b pop %ebx
iretl # switch to next
800135d1: cf iret
800135d2 <arch_switchThreadKernelToKernelPageDirChange>:
##----------------------------------------------------------------------
## switch thread and change page
##----------------------------------------------------------------------
.global arch_switchThreadKernelToKernelPageDirChange
arch_switchThreadKernelToKernelPageDirChange:
movl currentThreadInfo,%ebx
800135d2: 8b 1d 0c 40 04 80 mov 0x8004400c,%ebx
frstor 80(%ebx)
800135d8: dd 63 50 frstor 0x50(%ebx)
movl g_tss,%ecx # tss
800135db: 8b 0d 40 a0 04 80 mov 0x8004a040,%ecx
movl 68(%ebx),%eax # get esp0
800135e1: 8b 43 44 mov 0x44(%ebx),%eax
movl %eax,4(%ecx) # restore esp0
800135e4: 89 41 04 mov %eax,0x4(%ecx)
movl 76(%ebx),%eax # page directory
800135e7: 8b 43 4c mov 0x4c(%ebx),%eax
movl %eax, %cr3 # change page directory
800135ea: 0f 22 d8 mov %eax,%cr3
movl 12(%ebx),%eax # restore eax
800135ed: 8b 43 0c mov 0xc(%ebx),%eax
movl 16(%ebx),%ecx # restore ecx
800135f0: 8b 4b 10 mov 0x10(%ebx),%ecx
movl 20(%ebx),%edx # restore edx
800135f3: 8b 53 14 mov 0x14(%ebx),%edx
movl 28(%ebx),%esp # restore esp
800135f6: 8b 63 1c mov 0x1c(%ebx),%esp
movl 32(%ebx),%ebp # restore ebp
800135f9: 8b 6b 20 mov 0x20(%ebx),%ebp
movl 36(%ebx),%esi # restore esi
800135fc: 8b 73 24 mov 0x24(%ebx),%esi
movl 40(%ebx),%edi # restore edi
800135ff: 8b 7b 28 mov 0x28(%ebx),%edi
movw 48(%ebx),%es # restore es
80013602: 8e 43 30 mov 0x30(%ebx),%es
movw 44(%ebx),%ds # restore ds
80013605: 8e 5b 2c mov 0x2c(%ebx),%ds
pushl 8(%ebx)
80013608: ff 73 08 pushl 0x8(%ebx)
pushl 4(%ebx)
8001360b: ff 73 04 pushl 0x4(%ebx)
pushl 0(%ebx)
8001360e: ff 33 pushl (%ebx)
pushl 24(%ebx)
80013610: ff 73 18 pushl 0x18(%ebx)
popl %ebx # restore ebx
80013613: 5b pop %ebx
iretl # switch to next
80013614: cf iret