Mercurial > cgi-bin > hgwebdir.cgi > VMS > VMS_Implementations > VMS_impls > VMS__MC_shared_impl
comparison contextSwitch.s @ 76:9ddbb071142d
make hardware independent and port to 64bit
| author | Merten Sach <msach@mailbox.tu-berlin.de> |
|---|---|
| date | Thu, 16 Jun 2011 14:41:15 +0200 |
| parents | f6990e1ba998 |
| children | dbfc8382d546 bfaebdf60df3 |
comparison legend: equal, deleted, inserted, replaced
| 2:b6954584a39f | 3:bbdbf025a867 |
|---|---|
| 5 | 5 |
| 6 //Save return label address for the coreLoop to pointer | 6 //Save return label address for the coreLoop to pointer |
| 7 //Arguments: Pointer to variable holding address | 7 //Arguments: Pointer to variable holding address |
| 8 .globl saveCoreLoopReturnAddr | 8 .globl saveCoreLoopReturnAddr |
| 9 saveCoreLoopReturnAddr: | 9 saveCoreLoopReturnAddr: |
| 10 movl 0x4(%esp) , %eax #load pointer | 10 movq $coreLoopReturn, %rcx #load label address |
| 11 movl $coreLoopReturn, %ecx #load label address | 11 movq %rcx, (%rdi) #save address to pointer |
| 12 movl %ecx, (%eax) #save address | |
| 13 ret | 12 ret |
| 14 | 13 |
| 15 | 14 |
| 15 //Initializes VirtProcrFn at first run for 64 bit mode | |
| 16 //Puts argument from stack into registers | |
| 17 .globl startVirtProcrFn | |
| 18 startVirtProcrFn: | |
| 19 movq %rdi , %rsi #get second argument from first argument of switchVP | |
| 20 movq 0x08(%rsp), %rdi #get first argument | |
| 21 movq (%rsp) , %rax #get function addr | |
| 22 jmp *%rax | |
| 16 | 23 |
| 17 //Switches from CoreLoop to VP, either normal VP or the Master Loop | 24 //Switches from CoreLoop to VP, either a normal VP or the Master Loop |
| 18 //switch to virt procr's stack and frame ptr then jump to virt procr fn | 25 //switch to virt procr's stack and frame ptr then jump to virt procr fn |
| 19 /* VirtProcr offsets: | 26 /* VirtProcr offsets: |
| 20 * 0xc stackPtr | 27 * 0x10 stackPtr |
| 21 * 0x10 framePtr | 28 * 0x18 framePtr |
| 22 * 0x14 nextInstrPt | 29 * 0x20 nextInstrPt |
| 23 * 0x1c coreLoopFramePtr | 30 * 0x30 coreLoopFramePtr |
| 24 * 0x20 coreLoopStackPtr | 31 * 0x38 coreLoopStackPtr |
| 25 * | 32 * |
| 26 * _VMSMasterEnv offsets: | 33 * _VMSMasterEnv offsets: |
| 27 * 0x24 coreLoopReturnPt | 34 * 0x48 coreLoopReturnPt |
| 28 * 0x2c masterLock | 35 * 0x54 masterLock |
| 29 */ | 36 */ |
| 30 .globl switchToVP | 37 .globl switchToVP |
| 31 switchToVP: | 38 switchToVP: |
| 32 movl 0x4(%esp) , %ecx #get VirtProcr | 39 #VirtProcr in %rdi |
| 33 movl %esp , 0x20(%ecx) #save core loop stack pointer | 40 movq %rsp , 0x38(%rdi) #save core loop stack pointer |
| 34 movl %ebp , 0x1c(%ecx) #save core loop frame pointer | 41 movq %rbp , 0x30(%rdi) #save core loop frame pointer |
| 35 movl 0x0c(%ecx), %esp #restore stack pointer | 42 movq 0x10(%rdi), %rsp #restore stack pointer |
| 36 movl 0x10(%ecx), %ebp #restore frame pointer | 43 movq 0x18(%rdi), %rbp #restore frame pointer |
| 37 movl 0x14(%ecx), %eax #get jmp pointer | 44 movq 0x20(%rdi), %rax #get jmp pointer |
| 38 jmp *%eax #jmp to VP | 45 jmp *%rax #jmp to VP |
| 39 coreLoopReturn: | 46 coreLoopReturn: |
| 40 ret | 47 ret |
| 41 | 48 |
| 42 | 49 |
| 43 //switches to core loop. saves return address | 50 //switches to core loop. saves return address |
| 44 /* VirtProcr offsets: | 51 /* VirtProcr offsets: |
| 45 * 0xc stackPtr | 52 * 0x10 stackPtr |
| 46 * 0x10 framePtr | 53 * 0x18 framePtr |
| 47 * 0x14 nextInstrPt | 54 * 0x20 nextInstrPt |
| 48 * 0x1c coreLoopFramePtr | 55 * 0x30 coreLoopFramePtr |
| 49 * 0x20 coreLoopStackPtr | 56 * 0x38 coreLoopStackPtr |
| 50 * | 57 * |
| 51 * _VMSMasterEnv offsets: | 58 * _VMSMasterEnv offsets: |
| 52 * 0x24 coreLoopReturnPt | 59 * 0x48 coreLoopReturnPt |
| 53 * 0x28 coreLoopEndPt | 60 * 0x54 masterLock |
| 54 * 0x2c masterLock | |
| 55 */ | 61 */ |
| 56 .globl switchToCoreLoop | 62 .globl switchToCoreLoop |
| 57 switchToCoreLoop: | 63 switchToCoreLoop: |
| 58 movl 0x4(%esp) , %ecx #get VirtProcr | 64 #VirtProcr in %rdi |
| 59 movl $VPReturn , 0x14(%ecx) #store return address | 65 movq $VPReturn , 0x20(%rdi) #store return address |
| 60 movl %esp , 0x0c(%ecx) #save stack pointer | 66 movq %rsp , 0x10(%rdi) #save stack pointer |
| 61 movl %ebp , 0x10(%ecx) #save frame pointer | 67 movq %rbp , 0x18(%rdi) #save frame pointer |
| 62 movl 0x20(%ecx), %esp #restore stack pointer | 68 movq 0x38(%rdi), %rsp #restore stack pointer |
| 63 movl 0x1c(%ecx), %ebp #restore frame pointer | 69 movq 0x30(%rdi), %rbp #restore frame pointer |
| 64 movl $_VMSMasterEnv, %ecx | 70 movq $_VMSMasterEnv, %rcx |
| 65 movl (%ecx) , %ecx | 71 movq (%rcx) , %rcx |
| 66 movl 0x24(%ecx), %eax #get CoreLoopReturnPt | 72 movq 0x48(%rcx), %rax #get CoreLoopReturnPt |
| 67 jmp *%eax #jmp to CoreLoop | 73 jmp *%rax #jmp to CoreLoop |
| 68 VPReturn: | 74 VPReturn: |
| 69 ret | 75 ret |
| 70 | 76 |
| 71 | 77 |
| 72 | 78 |
| 73 //switches to core loop from master. saves return address | 79 //switches to core loop from master. saves return address |
| 74 //Releases masterLock so the next MasterLoop can be executed | 80 //Releases masterLock so the next MasterLoop can be executed |
| 75 /* VirtProcr offsets: | 81 /* VirtProcr offsets: |
| 76 * 0xc stackPtr | 82 * 0x10 stackPtr |
| 77 * 0x10 framePtr | 83 * 0x18 framePtr |
| 78 * 0x14 nextInstrPt | 84 * 0x20 nextInstrPt |
| 79 * 0x1c coreLoopFramePtr | 85 * 0x30 coreLoopFramePtr |
| 80 * 0x20 coreLoopStackPtr | 86 * 0x38 coreLoopStackPtr |
| 81 * | 87 * |
| 82 * _VMSMasterEnv offsets: | 88 * _VMSMasterEnv offsets: |
| 83 * 0x24 coreLoopReturnPt | 89 * 0x48 coreLoopReturnPt |
| 84 * 0x2c masterLock | 90 * 0x54 masterLock |
| 85 */ | 91 */ |
| 86 .globl masterSwitchToCoreLoop | 92 .globl masterSwitchToCoreLoop |
| 87 masterSwitchToCoreLoop: | 93 masterSwitchToCoreLoop: |
| 88 movl 0x4(%esp) , %ecx #get VirtProcr | 94 #VirtProcr in %rdi |
| 89 movl $MasterReturn, 0x14(%ecx) #store return address | 95 movq $MasterReturn, 0x20(%rdi) #store return address |
| 90 movl %esp , 0x0c(%ecx) #save stack pointer | 96 movq %rsp , 0x10(%rdi) #save stack pointer |
| 91 movl %ebp , 0x10(%ecx) #save frame pointer | 97 movq %rbp , 0x18(%rdi) #save frame pointer |
| 92 movl 0x20(%ecx), %esp #restore stack pointer | 98 movq 0x38(%rdi), %rsp #restore stack pointer |
| 93 movl 0x1c(%ecx), %ebp #restore frame pointer | 99 movq 0x30(%rdi), %rbp #restore frame pointer |
| 94 movl $_VMSMasterEnv, %ecx | 100 movq $_VMSMasterEnv, %rcx |
| 95 movl (%ecx) , %ecx | 101 movq (%rcx) , %rcx |
| 96 movl 0x24(%ecx), %eax #get CoreLoopReturnPt | 102 movq 0x48(%rcx), %rax #get CoreLoopReturnPt |
| 97 movl $0x0 , 0x2c(%ecx) #release lock | 103 movl $0x0 , 0x54(%rcx) #release lock |
| 98 jmp *%eax #jmp to CoreLoop | 104 jmp *%rax #jmp to CoreLoop |
| 99 MasterReturn: | 105 MasterReturn: |
| 100 ret | 106 ret |
| 101 | 107 |
| 102 | 108 |
| 103 //Switch to terminateCoreLoop | 109 //Switch to terminateCoreLoop |
| 110 //therefore switch to coreLoop context from master context | |
| 104 // no need to call because the stack is already set up for switchVP | 111 // no need to call because the stack is already set up for switchVP |
| 112 // and virtPr is in %rdi | |
| 105 // and both functions have the same argument. | 113 // and both functions have the same argument. |
| 106 // do not save the VP's registers because this function will never return | 114 // do not save the VP's registers because this function will never return |
| 107 /* VirtProcr offsets: | 115 /* VirtProcr offsets: |
| 108 * 0xc stackPtr | 116 * 0x10 stackPtr |
| 109 * 0x10 framePtr | 117 * 0x18 framePtr |
| 110 * 0x14 nextInstrPt | 118 * 0x20 nextInstrPt |
| 111 * 0x1c coreLoopFramePtr | 119 * 0x30 coreLoopFramePtr |
| 112 * 0x20 coreLoopStackPtr | 120 * 0x38 coreLoopStackPtr |
| 113 * | 121 * |
| 114 * _VMSMasterEnv offsets: | 122 * _VMSMasterEnv offsets: |
| 115 * 0x24 coreLoopReturnPt | 123 * 0x48 coreLoopReturnPt |
| 116 * 0x2c masterLock | 124 * 0x54 masterLock |
| 117 */ | 125 */ |
| 118 .globl asmTerminateCoreLoop | 126 .globl asmTerminateCoreLoop |
| 119 asmTerminateCoreLoop: | 127 asmTerminateCoreLoop: |
| 120 movl 0x4(%esp) , %ecx #get VirtProcr | 128 #VirtProcr in %rdi |
| 121 movl 0x20(%ecx), %esp #restore stack pointer | 129 movq 0x38(%rdi), %rsp #restore stack pointer |
| 122 movl 0x1c(%ecx), %ebp #restore frame pointer | 130 movq 0x30(%rdi), %rbp #restore frame pointer |
| 123 movl $terminateCoreLoop, %eax | 131 movq $terminateCoreLoop, %rax |
| 124 jmp *%eax #jmp to CoreLoop | 132 jmp *%rax #jmp to CoreLoop |
| 125 | 133 |
| 126 | 134 |
| 127 /* | 135 /* |
| 128 * This one for the sequential version is special. It discards the current stack | 136 * This one for the sequential version is special. It discards the current stack |
| 129 * and returns directly from the coreLoop after VMS__dissipate_procr was called | 137 * and returns directly from the coreLoop after VMS__dissipate_procr was called |
| 130 */ | 138 */ |
| 131 .globl asmTerminateCoreLoopSeq | 139 .globl asmTerminateCoreLoopSeq |
| 132 asmTerminateCoreLoopSeq: | 140 asmTerminateCoreLoopSeq: |
| 133 movl 0x4(%esp) , %ecx #get VirtProcr | 141 #VirtProcr in %rdi |
| 134 movl 0x20(%ecx), %esp #restore stack pointer | 142 movq 0x38(%rdi), %rsp #restore stack pointer |
| 135 movl 0x1c(%ecx), %ebp #restore frame pointer | 143 movq 0x30(%rdi), %rbp #restore frame pointer |
| 136 sub $0x4 , %esp | 144 #argument is in %rdi |
| 137 movl %ecx , (%esp) #put argument on stack | |
| 138 call VMS__dissipate_procr | 145 call VMS__dissipate_procr |
| 139 movl %ebp , %esp #goto the coreLoops stack | 146 movq %rbp , %rsp #goto the coreLoops stack |
| 140 pop %ebp #restore the old framepointer | 147 pop %rbp #restore the old framepointer |
| 141 ret #return from core loop | 148 ret #return from core loop |
| 142 | 149 |
