user_space #4
@@ -12,7 +12,7 @@

 /* The address of the function to call to set back the user thread's
    MMU configuration upon return to user context */
-.extern thread_prepare_exception_switch_back
+.extern threadPrepareExceptionSwitchBack

 .altmacro

@@ -60,7 +60,7 @@

 /* Reconfigure the MMU if needed */
 pushl %esp /* cpu_ctxt */
-call thread_prepare_exception_switch_back
+call threadPrepareExceptionSwitchBack
 addl $4, %esp /* Unallocate the stack */

 /* Prepare kernel TSS in case we are switching to a
@@ -99,6 +99,7 @@
 /* uint32_t flags */
 /* uint32_t cs; */
 /* uint32_t ip */
+/* uint32_t errcode */
 /* Pushes the other reg to save same and look like a struct cpu_state*/

 /* Backup the actual context */
@@ -132,7 +133,7 @@

 /* Reconfigure the MMU if needed */
 pushl %esp /* cpu_ctxt */
-call thread_prepare_exception_switch_back
+call threadPrepareExceptionSwitchBack
 addl $4, %esp /* Unallocate the stack */

 /* Prepare kernel TSS in case we are switching to a
@@ -158,6 +159,9 @@
 popl %eax
 popl %ebp

+/* Error code isn't compatible with iretd */
+addl $4, %esp
+
 iret
.endm

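The hunks above reserve a slot for the exception error code in the saved frame and pop it explicitly before iret, since iret only restores eip, cs and eflags (plus esp/ss on a privilege change) and will not consume an error code left on the stack. As a rough illustration only, the frame the wrapper builds might correspond to a C layout like the sketch below; the real struct cpu_state is defined elsewhere in the repository and its exact field order is not visible in this diff.

#include <stdint.h>

/* Illustrative sketch only -- the actual struct cpu_state lives elsewhere in
 * the tree and may name or order its fields differently.  Lowest addresses
 * (the registers popped first by the wrapper) come first. */
struct cpu_state_sketch {
    /* any other registers the wrapper saves sit below these two
       (they are popped earlier, outside the hunk shown above) */
    uint32_t eax;      /* restored by "popl %eax" */
    uint32_t ebp;      /* restored by "popl %ebp" */
    uint32_t errcode;  /* CPU-pushed (or dummy) error code; skipped with
                          "addl $4, %esp" because iret will not pop it */
    uint32_t eip;      /* pushed automatically by the CPU on the exception */
    uint32_t cs;
    uint32_t eflags;
    /* esp and ss follow only when the exception interrupted user mode */
};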
@@ -11,7 +11,7 @@

 /* The address of the function to call to set back the user thread's
    MMU configuration upon return to user context */
-.extern thread_prepare_irq_switch_back
+.extern threadPrepareIrqSwitchBack

 .altmacro

@@ -59,7 +59,7 @@

 /* Reconfigure the MMU if needed */
 pushl %esp /* cpu_ctxt */
-call thread_prepare_irq_switch_back
+call threadPrepareIrqSwitchBack
 addl $4, %esp /* Unallocate the stack */

 /* Prepare kernel TSS in case we are switching to a
@@ -14,7 +14,7 @@

 /* The address of the function to call to set back the user thread's
    MMU configuration upon return to user context */
-.extern thread_prepare_syscall_switch_back
+.extern threadPrepareSyscallSwitchBack

 .p2align 2, 0x90
 .globl syscallHandler
@@ -61,7 +61,7 @@ syscallHandler:

 /* Set the MMU configuration to that of the user thread's process */
 pushl %esp /* user_ctxt */
-call thread_prepare_syscall_switch_back
+call threadPrepareSyscallSwitchBack
 addl $4, %esp /* Unallocate the stack */

 /* Prepare kernel TSS because we are switching back to a user
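All three wrappers above use the same calling pattern: they push the address of the register frame they just saved (%esp), call the renamed C hook, and then pop the argument themselves with addl $4, %esp (plain cdecl). The C side therefore has to expose declarations along these lines; the header in which they actually live is not shown in this diff, so treat the snippet below as a sketch of the interface rather than a copy of it.

struct cpu_state;  /* saved register frame, defined elsewhere in the kernel */

/* Hooks the assembly wrappers call just before returning to the interrupted
 * context; each receives a pointer to the frame the wrapper built on the
 * stack (pushed as "cpu_ctxt" / "user_ctxt" above). */
void threadPrepareExceptionSwitchBack(struct cpu_state *cpu_ctxt);
void threadPrepareIrqSwitchBack(struct cpu_state *cpu_ctxt);
void threadPrepareSyscallSwitchBack(struct cpu_state *user_ctxt);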
@@ -260,16 +260,18 @@ int kthreadAddThread(struct kthread *th)

     return 0;
 }
-void thread_prepare_syscall_switch_back(struct cpu_state *cpu_state){
+
+void threadPrepareSyscallSwitchBack(struct cpu_state *cpu_state){
     (void)cpu_state;
     return;
 }

-void thread_prepare_exception_switch_back(struct cpu_state *cpu_state){
+void threadPrepareExceptionSwitchBack(struct cpu_state *cpu_state){
     (void)cpu_state;
     return;
 }
-void thread_prepare_irq_switch_back(struct cpu_state *cpu_state){
+
+void threadPrepareIrqSwitchBack(struct cpu_state *cpu_state){
     (void)cpu_state;
     return;
 }
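In this change the three switch-back hooks are still empty stubs; the comments in the assembly ("Reconfigure the MMU if needed", "Set the MMU configuration to that of the user thread's process") suggest they will eventually restore the address space of the process being returned to. The sketch below is one way that could look on i386, with every helper name invented for illustration; nothing of the sort is part of this pull request.

#include <stdint.h>

struct cpu_state;  /* saved register frame, defined elsewhere in the kernel */

/* Reload CR3 so the MMU walks the given page directory (i386). */
static inline void loadCr3Sketch(uint32_t pagedir_paddr)
{
    asm volatile("movl %0, %%cr3" : : "r"(pagedir_paddr) : "memory");
}

/* Hypothetical future body for one of the stubs above.  currentThread(),
 * threadGetProcess() and processGetPageDirPaddr() do not exist in this
 * change; they only stand in for "find the interrupted thread's process
 * and switch back to its address space". */
void threadPrepareSyscallSwitchBackSketch(struct cpu_state *user_ctxt)
{
    (void)user_ctxt;
    /* loadCr3Sketch(processGetPageDirPaddr(threadGetProcess(currentThread()))); */
}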