| rev |
line source |
|
msach@71
|
1 .data
|
|
msach@71
|
2
|
|
msach@71
|
3
|
|
msach@71
|
4 .text
|
|
msach@132
|
5 /* VirtProcr offsets:
|
|
msach@132
|
6 * 0x10 stackPtr
|
|
msach@132
|
7 * 0x18 framePtr
|
|
msach@132
|
8 * 0x20 nextInstrPt
|
|
msach@132
|
9 * 0x30 coreLoopFramePtr
|
|
msach@132
|
10 * 0x38 coreLoopStackPtr
|
|
msach@132
|
11 *
|
|
msach@132
|
12 * _VMSMasterEnv offsets:
|
|
msach@132
|
13 * 0x38 coreLoopReturnPt
|
|
msach@132
|
14 * 0x44 masterLock
|
|
msach@132
|
15 */
|
|
msach@71
|
//Save the coreLoop return label's address into a caller-supplied pointer.
//In:  %rdi = pointer to the variable that receives the address
//Out: nothing (clobbers %rcx)
.globl saveCoreLoopReturnAddr
saveCoreLoopReturnAddr:
    leaq    coreLoopReturn(%rip), %rcx  #label address, RIP-relative (PIE/PIC safe;
                                        #was movq $coreLoopReturn — absolute reloc)
    movq    %rcx, (%rdi)                #store the address through the pointer
    ret
|
|
msach@71
|
23
|
|
msach@71
|
24
|
|
msach@76
|
//First-run trampoline for a VirtProcrFn in 64-bit mode.
//Entered via switchToVP on the VP's freshly prepared stack, which holds
//(presumably set up by the VP-creation code — confirm against caller):
//  (%rsp)     = address of the VirtProcrFn to start
//  0x08(%rsp) = first argument for that function
//  %rdi       = argument that switchToVP received; passed on as second argument
.globl startVirtProcrFn
startVirtProcrFn:
    movq    (%rsp), %rax            #fetch the function's entry address
    movq    %rdi, %rsi              #switchToVP's argument becomes arg 2
    movq    0x08(%rsp), %rdi        #arg 1 comes from the prepared stack
    jmp     *%rax                   #tail-jump into the VirtProcrFn
|
|
msach@71
|
33
|
|
msach@76
|
//Switches from the coreLoop to a VP (either a normal VP or the MasterLoop):
//stashes the coreLoop's stack/frame pointers in the VirtProcr, installs the
//VP's own stack/frame pointers, then jumps to the VP's next instruction point.
//In: %rdi = VirtProcr
.globl switchToVP
switchToVP:
    movq    %rsp, 0x38(%rdi)        #stash coreLoop stack pointer  (coreLoopStackPtr)
    movq    %rbp, 0x30(%rdi)        #stash coreLoop frame pointer  (coreLoopFramePtr)
    movq    0x10(%rdi), %rsp        #install VP stack pointer      (stackPtr)
    movq    0x18(%rdi), %rbp        #install VP frame pointer      (framePtr)
    movq    0x20(%rdi), %rax        #fetch the VP's resume point   (nextInstrPt)
    jmp     *%rax                   #enter the VP
coreLoopReturn:
    ret                             #address handed out by saveCoreLoopReturnAddr
|
|
msach@71
|
47
|
|
msach@71
|
48
|
|
msach@71
|
//Switches from a VP back to the coreLoop. Stores VPReturn as the VP's resume
//point (nextInstrPt) so a later switchToVP continues right after this switch.
//In: %rdi = VirtProcr
.globl switchToCoreLoop
switchToCoreLoop:
    leaq    VPReturn(%rip), %rcx    #resume address, RIP-relative (PIE/PIC safe;
                                    #was movq $VPReturn — absolute reloc)
    movq    %rcx, 0x20(%rdi)        #store return address as nextInstrPt
    movq    %rsp, 0x10(%rdi)        #save VP stack pointer
    movq    %rbp, 0x18(%rdi)        #save VP frame pointer
    movq    0x38(%rdi), %rsp        #restore coreLoop stack pointer
    movq    0x30(%rdi), %rbp        #restore coreLoop frame pointer
    movq    _VMSMasterEnv(%rip), %rcx  #load master-env pointer in one RIP-relative
                                    #load (was movq $sym, %rcx + deref)
    movq    0x38(%rcx), %rax        #get coreLoopReturnPt (see header: env + 0x38)
    jmp     *%rax                   #jump back into the coreLoop
VPReturn:
    ret                             #VP resumes here on its next switchToVP
|
|
msach@71
|
64
|
|
msach@71
|
65
|
|
msach@71
|
66
|
|
msach@71
|
//Switches from the MasterLoop back to the coreLoop. Stores MasterReturn as the
//resume point, then releases masterLock so the next MasterLoop can execute.
//In: %rdi = VirtProcr (the master VP)
.globl masterSwitchToCoreLoop
masterSwitchToCoreLoop:
    leaq    MasterReturn(%rip), %rcx   #resume address, RIP-relative (PIE/PIC safe;
                                       #was movq $MasterReturn — absolute reloc)
    movq    %rcx, 0x20(%rdi)           #store return address as nextInstrPt
    movq    %rsp, 0x10(%rdi)           #save master stack pointer
    movq    %rbp, 0x18(%rdi)           #save master frame pointer
    movq    0x38(%rdi), %rsp           #restore coreLoop stack pointer
    movq    0x30(%rdi), %rbp           #restore coreLoop frame pointer
    movq    _VMSMasterEnv(%rip), %rcx  #load master-env pointer in one RIP-relative
                                       #load (was movq $sym, %rcx + deref)
    movq    0x38(%rcx), %rax           #get coreLoopReturnPt (see header: env + 0x38)
    movl    $0x0, 0x44(%rcx)           #release masterLock (plain store; NOTE(review):
                                       #relies on x86-TSO — confirm acquirer uses a
                                       #locked RMW to take the lock)
    jmp     *%rax                      #jump back into the coreLoop
MasterReturn:
    ret                                #master resumes here on its next switchToVP
|
|
msach@71
|
84
|
|
msach@71
|
85
|
|
msach@71
|
//Switch to terminateCoreLoop, i.e. switch to the coreLoop context from the
//master context. Not a call: the stack is already set up as terminateCoreLoop
//expects, and it takes the same single argument (still in %rdi) as switchVP.
//The VP's registers are not saved because this function never returns.
//In: %rdi = VirtProcr
.globl asmTerminateCoreLoop
asmTerminateCoreLoop:
    movq    0x38(%rdi), %rsp        #restore coreLoop stack pointer
    movq    0x30(%rdi), %rbp        #restore coreLoop frame pointer
    jmp     terminateCoreLoop       #direct tail-jump, relocation-safe (was
                                    #movq $terminateCoreLoop, %rax; jmp *%rax)
|
|
msach@75
|
99
|
|
msach@75
|
100
|
|
msach@75
|
/*
 * Sequential-version terminator. Discards the current (VP) stack and returns
 * directly from the coreLoop after VMS__dissipate_procr has been called.
 * In: %rdi = VirtProcr (also the argument for VMS__dissipate_procr)
 */
.globl asmTerminateCoreLoopSeq
asmTerminateCoreLoopSeq:
    movq    0x38(%rdi), %rsp        #restore coreLoop stack pointer
    movq    0x30(%rdi), %rbp        #restore coreLoop frame pointer
    subq    $8, %rsp                #realign for the call: the saved rsp is
                                    #switchToVP's entry rsp (16n+8), but the ABI
                                    #needs rsp%16 == 0 at a call site; the extra
                                    #slot is discarded by the movq below
    call    VMS__dissipate_procr    #argument already in %rdi
    movq    %rbp, %rsp              #unwind straight to the coreLoop's frame
    popq    %rbp                    #restore the coreLoop caller's frame pointer
    ret                             #return from the coreLoop
|
|
msach@71
|
115
|