Mercurial > cgi-bin > hgwebdir.cgi > VMS > VMS_Implementations > VMS_impls > VMS__MC_shared_impl
comparison contextSwitch.s @ 135:0b49fd35afc1
distributed free working
-an app sends a VMSSemReqst to its Master, which sends a request to a different Master
-a Master sends the request directly
-The request structure is freed by the sender once the request has been handled
There are still problems on shutdown: the shutdownVPs are all allocated by one Master, which is likely to be terminated
| author | Merten Sach <msach@mailbox.tu-berlin.de> |
|---|---|
| date | Fri, 16 Sep 2011 20:08:28 +0200 |
| parents | 9ddbb071142d |
| children | 99343ffe1918 |
comparison
equal
deleted
inserted
replaced
| 3:bbdbf025a867 | 4:043fc655dfc8 |
|---|---|
| 1 .data | 1 .data |
| 2 | 2 |
| 3 | 3 |
| 4 .text | 4 .text |
| 5 | 5 /* VirtProcr offsets: |
| 6 * 0x10 stackPtr | |
| 7 * 0x18 framePtr | |
| 8 * 0x20 nextInstrPt | |
| 9 * 0x30 coreLoopFramePtr | |
| 10 * 0x38 coreLoopStackPtr | |
| 11 * | |
| 12 * _VMSMasterEnv offsets: | |
| 13 * 0x38 coreLoopReturnPt | |
| 14 * 0x44 masterLock | |
| 15 */ | |
| 6 //Save return label address for the coreLoop to pointer | 16 //Save return label address for the coreLoop to pointer |
| 7 //Arguments: Pointer to variable holding address | 17 //Arguments: Pointer to variable holding address |
| 8 .globl saveCoreLoopReturnAddr | 18 .globl saveCoreLoopReturnAddr |
| 9 saveCoreLoopReturnAddr: | 19 saveCoreLoopReturnAddr: |
| 10 movq $coreLoopReturn, %rcx #load label address | 20 movq $coreLoopReturn, %rcx #load label address |
| 21 movq (%rsp) , %rax #get function addr | 31 movq (%rsp) , %rax #get function addr |
| 22 jmp *%rax | 32 jmp *%rax |
| 23 | 33 |
| 24 //Switches form CoreLoop to VP ether a normal VP or the Master Loop | 34 //Switches form CoreLoop to VP ether a normal VP or the Master Loop |
| 25 //switch to virt procr's stack and frame ptr then jump to virt procr fn | 35 //switch to virt procr's stack and frame ptr then jump to virt procr fn |
| 26 /* VirtProcr offsets: | |
| 27 * 0x10 stackPtr | |
| 28 * 0x18 framePtr | |
| 29 * 0x20 nextInstrPt | |
| 30 * 0x30 coreLoopFramePtr | |
| 31 * 0x38 coreLoopStackPtr | |
| 32 * | |
| 33 * _VMSMasterEnv offsets: | |
| 34 * 0x48 coreLoopReturnPt | |
| 35 * 0x54 masterLock | |
| 36 */ | |
| 37 .globl switchToVP | 36 .globl switchToVP |
| 38 switchToVP: | 37 switchToVP: |
| 39 #VirtProcr in %rdi | 38 #VirtProcr in %rdi |
| 40 movq %rsp , 0x38(%rdi) #save core loop stack pointer | 39 movq %rsp , 0x38(%rdi) #save core loop stack pointer |
| 41 movq %rbp , 0x30(%rdi) #save core loop frame pointer | 40 movq %rbp , 0x30(%rdi) #save core loop frame pointer |
| 46 coreLoopReturn: | 45 coreLoopReturn: |
| 47 ret | 46 ret |
| 48 | 47 |
| 49 | 48 |
| 50 //switches to core loop. saves return address | 49 //switches to core loop. saves return address |
| 51 /* VirtProcr offsets: | |
| 52 * 0x10 stackPtr | |
| 53 * 0x18 framePtr | |
| 54 * 0x20 nextInstrPt | |
| 55 * 0x30 coreLoopFramePtr | |
| 56 * 0x38 coreLoopStackPtr | |
| 57 * | |
| 58 * _VMSMasterEnv offsets: | |
| 59 * 0x48 coreLoopReturnPt | |
| 60 * 0x54 masterLock | |
| 61 */ | |
| 62 .globl switchToCoreLoop | 50 .globl switchToCoreLoop |
| 63 switchToCoreLoop: | 51 switchToCoreLoop: |
| 64 #VirtProcr in %rdi | 52 #VirtProcr in %rdi |
| 65 movq $VPReturn , 0x20(%rdi) #store return address | 53 movq $VPReturn , 0x20(%rdi) #store return address |
| 66 movq %rsp , 0x10(%rdi) #save stack pointer | 54 movq %rsp , 0x10(%rdi) #save stack pointer |
| 67 movq %rbp , 0x18(%rdi) #save frame pointer | 55 movq %rbp , 0x18(%rdi) #save frame pointer |
| 68 movq 0x38(%rdi), %rsp #restore stack pointer | 56 movq 0x38(%rdi), %rsp #restore stack pointer |
| 69 movq 0x30(%rdi), %rbp #restore frame pointer | 57 movq 0x30(%rdi), %rbp #restore frame pointer |
| 70 movq $_VMSMasterEnv, %rcx | 58 movq $_VMSMasterEnv, %rcx |
| 71 movq (%rcx) , %rcx | 59 movq (%rcx) , %rcx |
| 72 movq 0x48(%rcx), %rax #get CoreLoopStartPt | 60 movq 0x38(%rcx), %rax #get CoreLoopStartPt |
| 73 jmp *%rax #jmp to CoreLoop | 61 jmp *%rax #jmp to CoreLoop |
| 74 VPReturn: | 62 VPReturn: |
| 75 ret | 63 ret |
| 76 | 64 |
| 77 | 65 |
| 78 | 66 |
| 79 //switches to core loop from master. saves return address | 67 //switches to core loop from master. saves return address |
| 80 //Releases masterLock so the next MasterLoop can be executed | 68 //Releases masterLock so the next MasterLoop can be executed |
| 81 /* VirtProcr offsets: | |
| 82 * 0x10 stackPtr | |
| 83 * 0x18 framePtr | |
| 84 * 0x20 nextInstrPt | |
| 85 * 0x30 coreLoopFramePtr | |
| 86 * 0x38 coreLoopStackPtr | |
| 87 * | |
| 88 * _VMSMasterEnv offsets: | |
| 89 * 0x48 coreLoopReturnPt | |
| 90 * 0x54 masterLock | |
| 91 */ | |
| 92 .globl masterSwitchToCoreLoop | 69 .globl masterSwitchToCoreLoop |
| 93 masterSwitchToCoreLoop: | 70 masterSwitchToCoreLoop: |
| 94 #VirtProcr in %rdi | 71 #VirtProcr in %rdi |
| 95 movq $MasterReturn, 0x20(%rdi) #store return address | 72 movq $MasterReturn, 0x20(%rdi) #store return address |
| 96 movq %rsp , 0x10(%rdi) #save stack pointer | 73 movq %rsp , 0x10(%rdi) #save stack pointer |
| 97 movq %rbp , 0x18(%rdi) #save frame pointer | 74 movq %rbp , 0x18(%rdi) #save frame pointer |
| 98 movq 0x38(%rdi), %rsp #restore stack pointer | 75 movq 0x38(%rdi), %rsp #restore stack pointer |
| 99 movq 0x30(%rdi), %rbp #restore frame pointer | 76 movq 0x30(%rdi), %rbp #restore frame pointer |
| 100 movq $_VMSMasterEnv, %rcx | 77 movq $_VMSMasterEnv, %rcx |
| 101 movq (%rcx) , %rcx | 78 movq (%rcx) , %rcx |
| 102 movq 0x48(%rcx), %rax #get CoreLoopStartPt | 79 movq 0x38(%rcx), %rax #get CoreLoopStartPt |
| 103 movl $0x0 , 0x54(%rcx) #release lock | 80 movl $0x0 , 0x44(%rcx) #release lock |
| 104 jmp *%rax #jmp to CoreLoop | 81 jmp *%rax #jmp to CoreLoop |
| 105 MasterReturn: | 82 MasterReturn: |
| 106 ret | 83 ret |
| 107 | 84 |
| 108 | 85 |
| 110 //therefor switch to coreLoop context from master context | 87 //therefor switch to coreLoop context from master context |
| 111 // no need to call because the stack is already set up for switchVP | 88 // no need to call because the stack is already set up for switchVP |
| 112 // and virtPr is in %rdi | 89 // and virtPr is in %rdi |
| 113 // and both functions have the same argument. | 90 // and both functions have the same argument. |
| 114 // do not save register of VP because this function will never return | 91 // do not save register of VP because this function will never return |
| 115 /* VirtProcr offsets: | |
| 116 * 0x10 stackPtr | |
| 117 * 0x18 framePtr | |
| 118 * 0x20 nextInstrPt | |
| 119 * 0x30 coreLoopFramePtr | |
| 120 * 0x38 coreLoopStackPtr | |
| 121 * | |
| 122 * _VMSMasterEnv offsets: | |
| 123 * 0x48 coreLoopReturnPt | |
| 124 * 0x58 masterLock | |
| 125 */ | |
| 126 .globl asmTerminateCoreLoop | 92 .globl asmTerminateCoreLoop |
| 127 asmTerminateCoreLoop: | 93 asmTerminateCoreLoop: |
| 128 #VirtProcr in %rdi | 94 #VirtProcr in %rdi |
| 129 movq 0x38(%rdi), %rsp #restore stack pointer | 95 movq 0x38(%rdi), %rsp #restore stack pointer |
| 130 movq 0x30(%rdi), %rbp #restore frame pointer | 96 movq 0x30(%rdi), %rbp #restore frame pointer |
