| rev |
line source |
|
msach@71
|
1 #include "VMS.h"
|
|
msach@71
|
2
|
|
msach@71
|
3 .data
|
|
msach@71
|
4
|
|
msach@71
|
5
|
|
msach@71
|
6 .text
|
|
msach@71
|
7
|
|
msach@71
|
//-----------------------------------------------------------------------
// void saveCoreLoopReturnAddr( void **addrPtr )
// Stores the address of the coreLoopReturn label (defined in switchToVP)
// into the variable the argument points to, so other code can later
// resume the core loop at that label.
// ABI:   cdecl, x86-32
// In:    0x4(%esp) = addrPtr
// Clobb: %eax, %ecx (both caller-saved under cdecl)
//-----------------------------------------------------------------------
.globl saveCoreLoopReturnAddr
saveCoreLoopReturnAddr:
    movl    0x4(%esp), %eax             # eax = addrPtr (first stack arg)
    movl    $coreLoopReturn, %ecx       # ecx = address of coreLoopReturn label
    movl    %ecx, (%eax)                # *addrPtr = &&coreLoopReturn
    ret
|
|
msach@71
|
16
|
|
msach@71
|
17
|
|
msach@71
|
18
|
|
msach@71
|
//-----------------------------------------------------------------------
// Switches from the CoreLoop to a VP (either a normal VP or the Master
// loop): saves the core loop's stack/frame pointers into the VirtProcr,
// installs the VP's saved stack/frame pointers, then jumps to the VP's
// saved resume point.
//
// NOTE(review): callee-saved registers (%ebx, %esi, %edi) are NOT
// saved/restored across the switch; the core loop is re-entered at
// coreLoopStartPt rather than returning here, so it must not rely on
// them surviving — confirm against the core loop implementation.
/* VirtProcr offsets (must match the struct layout in VMS.h):
 *   0x0c  stackPtr
 *   0x10  framePtr
 *   0x14  nextInstrPt
 *   0x1c  coreLoopFramePtr
 *   0x20  coreLoopStackPtr
 */
//-----------------------------------------------------------------------
.globl switchToVP
switchToVP:
    movl    0x4(%esp), %ecx             # ecx = VirtProcr* (first stack arg)
    movl    %esp, 0x20(%ecx)            # VirtProcr->coreLoopStackPtr = esp
    movl    %ebp, 0x1c(%ecx)            # VirtProcr->coreLoopFramePtr = ebp
    movl    0x0c(%ecx), %esp            # switch onto the VP's stack
    movl    0x10(%ecx), %ebp            # install the VP's frame pointer
    movl    0x14(%ecx), %eax            # eax = VP's saved resume point
    jmp     *%eax                       # resume the VP; does not fall through
coreLoopReturn:                         # address captured by saveCoreLoopReturnAddr
    ret
|
|
msach@71
|
44
|
|
msach@71
|
45
|
|
msach@71
|
//-----------------------------------------------------------------------
// Suspends the current VP and switches back to the CoreLoop: records
// VPReturn as the VP's resume point, saves the VP's stack/frame
// pointers, restores the core loop's, then jumps to
// _VMSMasterEnv->coreLoopStartPt.
/* VirtProcr offsets (must match the struct layout in VMS.h):
 *   0x0c  stackPtr
 *   0x10  framePtr
 *   0x14  nextInstrPt
 *   0x1c  coreLoopFramePtr
 *   0x20  coreLoopStackPtr
 *
 * _VMSMasterEnv offsets:
 *   0x24  coreLoopStartPt
 *   0x28  coreLoopEndPt
 *   0x30  masterLock
 */
//-----------------------------------------------------------------------
.globl switchToCoreLoop
switchToCoreLoop:
    movl    0x4(%esp), %ecx             # ecx = VirtProcr* (first stack arg)
    movl    $VPReturn, 0x14(%ecx)       # nextInstrPt = VPReturn (resume point)
    movl    %esp, 0x0c(%ecx)            # VirtProcr->stackPtr = esp
    movl    %ebp, 0x10(%ecx)            # VirtProcr->framePtr = ebp
    movl    0x20(%ecx), %esp            # switch back onto the core loop's stack
    movl    0x1c(%ecx), %ebp            # restore the core loop's frame pointer
    movl    $_VMSMasterEnv, %ecx        # ecx = &_VMSMasterEnv
    movl    (%ecx), %ecx                # _VMSMasterEnv is itself a pointer; deref it
    movl    0x24(%ecx), %eax            # eax = coreLoopStartPt
    jmp     *%eax                       # enter the core loop; does not fall through
VPReturn:                               # switchToVP jumps here when this VP resumes
    ret
|
|
msach@71
|
73
|
|
msach@71
|
74
|
|
msach@71
|
75
|
|
msach@71
|
//-----------------------------------------------------------------------
// Suspends the Master VP and switches back to the CoreLoop: records
// MasterReturn as the resume point, saves the master's stack/frame
// pointers, restores the core loop's, releases masterLock, then jumps
// to _VMSMasterEnv->coreLoopStartPt.
//
// NOTE(review): the lock at 0x30 is released after this core is already
// back on its core-loop stack but before the indirect jump — presumably
// safe because the master's state is fully saved by then; confirm
// against the lock's acquisition site.
/* VirtProcr offsets (must match the struct layout in VMS.h):
 *   0x0c  stackPtr
 *   0x10  framePtr
 *   0x14  nextInstrPt
 *   0x1c  coreLoopFramePtr
 *   0x20  coreLoopStackPtr
 *
 * _VMSMasterEnv offsets:
 *   0x24  coreLoopStartPt
 *   0x28  coreLoopEndPt
 *   0x30  masterLock
 */
//-----------------------------------------------------------------------
.globl masterSwitchToCoreLoop
masterSwitchToCoreLoop:
    movl    0x4(%esp), %ecx             # ecx = VirtProcr* (first stack arg)
    movl    $MasterReturn, 0x14(%ecx)   # nextInstrPt = MasterReturn (resume point)
    movl    %esp, 0x0c(%ecx)            # VirtProcr->stackPtr = esp
    movl    %ebp, 0x10(%ecx)            # VirtProcr->framePtr = ebp
    movl    0x20(%ecx), %esp            # switch back onto the core loop's stack
    movl    0x1c(%ecx), %ebp            # restore the core loop's frame pointer
    movl    $_VMSMasterEnv, %ecx        # ecx = &_VMSMasterEnv
    movl    (%ecx), %ecx                # _VMSMasterEnv is itself a pointer; deref it
    movl    0x24(%ecx), %eax            # eax = coreLoopStartPt
    movl    $0x0, 0x30(%ecx)            # release masterLock
    jmp     *%eax                       # enter the core loop; does not fall through
MasterReturn:                           # switchToVP jumps here when the master resumes
    ret
|
|
msach@71
|
104
|
|
msach@71
|
105
|
|
msach@71
|
//-----------------------------------------------------------------------
// One-way switch from a VP onto the core loop's stack, jumping to the
// external C function terminateCoreLoop. The caller's stack is already
// set up the same way as for switchToVP, so this is jumped to rather
// than called, and the VP's registers and resume point are deliberately
// NOT saved — control never returns to this VP.
/* VirtProcr offsets (must match the struct layout in VMS.h):
 *   0x1c  coreLoopFramePtr
 *   0x20  coreLoopStackPtr
 */
//-----------------------------------------------------------------------
.globl asmTerminateCoreLoop
asmTerminateCoreLoop:
    movl    0x4(%esp), %ecx             # ecx = VirtProcr* (first stack arg)
    movl    0x20(%ecx), %esp            # switch onto the core loop's stack
    movl    0x1c(%ecx), %ebp            # restore the core loop's frame pointer
    movl    $terminateCoreLoop, %eax    # eax = terminateCoreLoop (external C fn)
    jmp     *%eax                       # tail-jump; never returns
|
|
msach@71
|
128
|