msach@71: .data msach@71: msach@71: msach@71: .text msach@71: msach@71: //Save return label address for the coreLoop to pointer msach@71: //Arguments: Pointer to variable holding address msach@71: .globl saveCoreLoopReturnAddr msach@71: saveCoreLoopReturnAddr: msach@76: movq $coreLoopReturn, %rcx #load label address msach@76: movq %rcx, (%rdi) #save address to pointer msach@71: ret msach@71: msach@71: msach@76: //Initializes VirtProcrFn at first run for 64 bit mode msach@76: //Puts argument from stack into registers msach@76: .globl startVirtProcrFn msach@76: startVirtProcrFn: msach@76: movq %rdi , %rsi #get second argument from first argument of switchVP msach@76: movq 0x08(%rsp), %rdi #get first argument msach@76: movq (%rsp) , %rax #get function addr msach@76: jmp *%rax msach@71: msach@76: //Switches form CoreLoop to VP ether a normal VP or the Master Loop msach@71: //switch to virt procr's stack and frame ptr then jump to virt procr fn msach@71: /* VirtProcr offsets: msach@76: * 0x10 stackPtr msach@76: * 0x18 framePtr msach@76: * 0x20 nextInstrPt msach@76: * 0x30 coreLoopFramePtr msach@76: * 0x38 coreLoopStackPtr msach@71: * msach@71: * _VMSMasterEnv offsets: msach@76: * 0x48 coreLoopReturnPt msach@76: * 0x54 masterLock msach@71: */ msach@71: .globl switchToVP msach@71: switchToVP: msach@76: #VirtProcr in %rdi msach@76: movq %rsp , 0x38(%rdi) #save core loop stack pointer msach@76: movq %rbp , 0x30(%rdi) #save core loop frame pointer msach@76: movq 0x10(%rdi), %rsp #restore stack pointer msach@76: movq 0x18(%rdi), %rbp #restore frame pointer msach@76: movq 0x20(%rdi), %rax #get jmp pointer msach@76: jmp *%rax #jmp to VP msach@71: coreLoopReturn: msach@71: ret msach@71: msach@71: msach@71: //switches to core loop. 
// Saves the VP's resume address (VPReturn) into the VirtProcr along with its
// stack and frame pointers, restores the core loop's stack and frame
// pointers, and jumps to the core loop return point kept in _VMSMasterEnv.
/* VirtProcr offsets:
 *  0x10 stackPtr
 *  0x18 framePtr
 *  0x20 nextInstrPt
 *  0x30 coreLoopFramePtr
 *  0x38 coreLoopStackPtr
 *
 * _VMSMasterEnv offsets:
 *  0x48 coreLoopReturnPt
 *  0x54 masterLock
 */
    .globl switchToCoreLoop
switchToCoreLoop:
    # VirtProcr in %rdi
    movq    $VPReturn, 0x20(%rdi)       # store resume address for this VP
    movq    %rsp, 0x10(%rdi)            # save VP stack pointer
    movq    %rbp, 0x18(%rdi)            # save VP frame pointer
    movq    0x38(%rdi), %rsp            # restore core loop stack pointer
    movq    0x30(%rdi), %rbp            # restore core loop frame pointer
    movq    $_VMSMasterEnv, %rcx
    movq    (%rcx), %rcx                # dereference to reach the env struct
    movq    0x48(%rcx), %rax            # get coreLoopReturnPt
    jmp     *%rax                       # jump to core loop
VPReturn:
    ret


// masterSwitchToCoreLoop: switches to the core loop from the master,
// saving the master's resume address.  Releases masterLock so the next
// MasterLoop invocation can be executed.
/* VirtProcr offsets:
 *  0x10 stackPtr
 *  0x18 framePtr
 *  0x20 nextInstrPt
 *  0x30 coreLoopFramePtr
 *  0x38 coreLoopStackPtr
 *
 * _VMSMasterEnv offsets:
 *  0x48 coreLoopReturnPt
 *  0x54 masterLock
 */
    .globl masterSwitchToCoreLoop
masterSwitchToCoreLoop:
    # VirtProcr in %rdi
    movq    $MasterReturn, 0x20(%rdi)   # store resume address for the master
    movq    %rsp, 0x10(%rdi)            # save master stack pointer
    movq    %rbp, 0x18(%rdi)            # save master frame pointer
    movq    0x38(%rdi), %rsp            # restore core loop stack pointer
    movq    0x30(%rdi), %rbp            # restore core loop frame pointer
    movq    $_VMSMasterEnv, %rcx
    movq    (%rcx), %rcx                # dereference to reach the env struct
    movq    0x48(%rcx), %rax            # get coreLoopReturnPt
    movl    $0x0, 0x54(%rcx)            # release masterLock
    jmp     *%rax                       # jump to core loop
MasterReturn:
    ret

// asmTerminateCoreLoop: switch to terminateCoreLoop, therefore switching to
// the core loop context from the master context.
// No call is needed because the stack is already set up for switchVP,
// virtPr is in %rdi, and both functions take the same argument.
// The VP's registers are not saved because this function never returns.
/* VirtProcr offsets:
 *  0x10 stackPtr
 *  0x18 framePtr
 *  0x20 nextInstrPt
 *  0x30 coreLoopFramePtr
 *  0x38 coreLoopStackPtr
 *
 * _VMSMasterEnv offsets:
 *  0x48 coreLoopReturnPt
 *  0x54 masterLock
 */
    .globl asmTerminateCoreLoop
asmTerminateCoreLoop:
    # VirtProcr in %rdi
    movq    0x38(%rdi), %rsp            # restore core loop stack pointer
    movq    0x30(%rdi), %rbp            # restore core loop frame pointer
    movq    $terminateCoreLoop, %rax
    jmp     *%rax                       # jump to terminateCoreLoop


/*
 * asmTerminateCoreLoopSeq: special variant for the sequential version.
 * Discards the current stack and returns directly from the coreLoop after
 * VMS__dissipate_procr was called.
 */
    .globl asmTerminateCoreLoopSeq
asmTerminateCoreLoopSeq:
    # VirtProcr in %rdi
    movq    0x38(%rdi), %rsp            # restore core loop stack pointer
    movq    0x30(%rdi), %rbp            # restore core loop frame pointer
    # argument (VirtProcr) is already in %rdi
    call    VMS__dissipate_procr
    movq    %rbp, %rsp                  # go to the core loop's stack
    pop     %rbp                        # restore the old frame pointer
    ret                                 # return from core loop