| rev |
line source |
|
msach@71
|
/* Data section is intentionally empty — all mutable state lives in the
   VirtProcr / _VMSMasterEnv structures handed in from the C side. */
	.data

	.text
|
|
msach@71
|
//-----------------------------------------------------------------------
// void saveCoreLoopReturnAddr( void **addrVar )
// Save the address of the coreLoopReturn label (the point switchToVP
// falls through to when a VP resumes the core loop) into the variable
// the pointer argument refers to.
// In:       0x4(%esp) = pointer to the variable receiving the address
// Clobbers: %eax (caller-saved under i386 cdecl)
//-----------------------------------------------------------------------
	.globl saveCoreLoopReturnAddr
saveCoreLoopReturnAddr:
	movl    0x4(%esp), %eax             #load pointer argument
	movl    $coreLoopReturn, (%eax)     #store label address directly
	                                    # (imm32->mem; no scratch reg needed)
	ret
|
|
msach@71
|
14
|
|
msach@71
|
15
|
|
msach@71
|
16
|
|
msach@71
|
//Switches from the coreLoop to a VP, either a normal VP or the MasterLoop
//switch to virt procr's stack and frame ptr then jump to virt procr fn
/* VirtProcr offsets:
 * 0xc  stackPtr
 * 0x10 framePtr
 * 0x14 nextInstrPt
 * 0x1c coreLoopFramePtr
 * 0x20 coreLoopStackPtr
 *
 * _VMSMasterEnv offsets:
 * 0x24 coreLoopReturnPt
 * 0x2c masterLock
 */
//In:       0x4(%esp) = VirtProcr pointer
//Clobbers: %eax, %ecx, %esp, %ebp (deliberate stack switch — never
//          returns normally; control re-enters at coreLoopReturn)
	.globl switchToVP
switchToVP:
	movl    0x4(%esp) , %ecx            #get VirtProcr
	movl    %esp , 0x20(%ecx)           #save core loop stack pointer
	movl    %ebp , 0x1c(%ecx)           #save core loop frame pointer
	movl    0x0c(%ecx), %esp            #restore VP's stack pointer
	movl    0x10(%ecx), %ebp            #restore VP's frame pointer
	movl    0x14(%ecx), %eax            #get VP's resume point (nextInstrPt)
	jmp     *%eax                       #jmp to VP
coreLoopReturn:                             #switchToCoreLoop resumes here,
	ret                                 # on the restored core loop stack
|
|
msach@71
|
41
|
|
msach@71
|
42
|
|
msach@71
|
//Switches from a VP back to the core loop. Saves this VP's resume address
//into nextInstrPt so a later switchToVP re-enters at VPReturn.
/* VirtProcr offsets:
 * 0xc  stackPtr
 * 0x10 framePtr
 * 0x14 nextInstrPt
 * 0x1c coreLoopFramePtr
 * 0x20 coreLoopStackPtr
 *
 * _VMSMasterEnv offsets:
 * 0x24 coreLoopReturnPt
 * 0x28 coreLoopEndPt
 * 0x2c masterLock
 */
//In:       0x4(%esp) = VirtProcr pointer
//Clobbers: %eax, %ecx, %esp, %ebp (deliberate stack switch)
	.globl switchToCoreLoop
switchToCoreLoop:
	movl    0x4(%esp) , %ecx            #get VirtProcr
	movl    $VPReturn , 0x14(%ecx)      #store resume address (nextInstrPt)
	movl    %esp , 0x0c(%ecx)           #save VP stack pointer
	movl    %ebp , 0x10(%ecx)           #save VP frame pointer
	movl    0x20(%ecx), %esp            #restore core loop stack pointer
	movl    0x1c(%ecx), %ebp            #restore core loop frame pointer
	movl    $_VMSMasterEnv, %ecx        #address of global env pointer
	movl    (%ecx) , %ecx               #deref: %ecx = master env struct
	movl    0x24(%ecx), %eax            #get coreLoopReturnPt
	                                    # (comment fixed: was "CoreLoopStartPt",
	                                    #  but 0x24 is coreLoopReturnPt per table)
	jmp     *%eax                       #jmp back into the core loop
VPReturn:                                   #switchToVP resumes this VP here
	ret
|
|
msach@71
|
70
|
|
msach@71
|
71
|
|
msach@71
|
72
|
|
msach@71
|
//Switches to the core loop from the MasterLoop VP. Saves return address.
//Releases masterLock so the next MasterLoop invocation can be executed.
/* VirtProcr offsets:
 * 0xc  stackPtr
 * 0x10 framePtr
 * 0x14 nextInstrPt
 * 0x1c coreLoopFramePtr
 * 0x20 coreLoopStackPtr
 *
 * _VMSMasterEnv offsets:
 * 0x24 coreLoopReturnPt
 * 0x2c masterLock
 */
//In:       0x4(%esp) = VirtProcr (the master VP) pointer
//Clobbers: %eax, %ecx, %esp, %ebp (deliberate stack switch)
	.globl masterSwitchToCoreLoop
masterSwitchToCoreLoop:
	movl    0x4(%esp) , %ecx            #get VirtProcr
	movl    $MasterReturn, 0x14(%ecx)   #store resume address (nextInstrPt)
	movl    %esp , 0x0c(%ecx)           #save master VP stack pointer
	movl    %ebp , 0x10(%ecx)           #save master VP frame pointer
	movl    0x20(%ecx), %esp            #restore core loop stack pointer
	movl    0x1c(%ecx), %ebp            #restore core loop frame pointer
	movl    $_VMSMasterEnv, %ecx        #address of global env pointer
	movl    (%ecx) , %ecx               #deref: %ecx = master env struct
	movl    0x24(%ecx), %eax            #get coreLoopReturnPt
	                                    # (comment fixed: was "CoreLoopStartPt")
	movl    $0x0 , 0x2c(%ecx)           #release masterLock -- plain store;
	                                    # NOTE(review): x86 stores are release-
	                                    # ordered, but confirm the acquire side
	                                    # if this runs on multiple cores
	jmp     *%eax                       #jmp back into the core loop
MasterReturn:                               #switchToVP resumes the master VP here
	ret
|
|
msach@71
|
101
|
|
msach@71
|
102
|
|
msach@71
|
//Switch to terminateCoreLoop
// no need to call because the stack is already set up for switchToVP
// and both functions take the same argument.
// does not save VP registers because this function never returns to the VP
/* VirtProcr offsets:
 * 0xc  stackPtr
 * 0x10 framePtr
 * 0x14 nextInstrPt
 * 0x1c coreLoopFramePtr
 * 0x20 coreLoopStackPtr
 *
 * _VMSMasterEnv offsets:
 * 0x24 coreLoopReturnPt
 * 0x2c masterLock
 */
//In:       0x4(%esp) = VirtProcr pointer
//Clobbers: %ecx, %esp, %ebp (deliberate stack switch; tail-jumps away)
	.globl asmTerminateCoreLoop
asmTerminateCoreLoop:
	movl    0x4(%esp) , %ecx            #get VirtProcr
	movl    0x20(%ecx), %esp            #restore core loop stack pointer
	movl    0x1c(%ecx), %ebp            #restore core loop frame pointer
	jmp     terminateCoreLoop           #tail-jump to terminateCoreLoop
	                                    # (direct jmp; the old mov-$label-to-
	                                    #  %eax + jmp *%eax was equivalent but
	                                    #  clobbered %eax and its comment
	                                    #  wrongly said "jmp to CoreLoop")
|
|
msach@75
|
125
|
|
msach@75
|
126
|
|
msach@75
|
/*
 * Sequential-version variant. Discards the current stack and returns
 * directly out of the coreLoop frame after VMS__dissipate_procr runs.
 */
	.globl asmTerminateCoreLoopSeq
asmTerminateCoreLoopSeq:
	movl    0x4(%esp) , %ecx            #get VirtProcr
	movl    0x20(%ecx), %esp            #switch onto the core loop's stack
	movl    0x1c(%ecx), %ebp            #and adopt its frame pointer
	pushl   %ecx                        #argument: the VirtProcr
	                                    # (same effect as sub $4 + movl to (%esp))
	call    VMS__dissipate_procr
	leave                               #%esp = %ebp; pop saved %ebp
	                                    # (same effect as movl %ebp,%esp; pop %ebp)
	ret                                 #return from the core loop itself
|
|
msach@71
|
142
|