.data

.text

/*
 * GAS/AT&T syntax, 32-bit x86 (IA-32), cdecl calling convention.
 *
 * saveCoreLoopReturnAddr(void **addrVar)
 * Stores the address of the coreLoopReturn label (inside switchToVP below)
 * into the variable pointed to by the single stack argument.
 * Clobbers: eax, ecx.
 */
.globl saveCoreLoopReturnAddr
saveCoreLoopReturnAddr:
    movl    0x4(%esp) , %eax            #load pointer argument
    movl    $coreLoopReturn, %ecx       #load label address
    movl    %ecx, (%eax)                #*addrVar = &coreLoopReturn
    ret


/*
 * switchToVP(VirtProcr *vp)
 * Switches from the core loop to a VP -- either a normal VP or the master
 * loop.  Saves the core loop's stack and frame pointers into the VirtProcr,
 * installs the VP's own stack and frame pointers, then jumps to the VP's
 * next-instruction pointer.  Control re-enters the core loop path at
 * coreLoopReturn.
 *
 * VirtProcr offsets:
 *  0x0c stackPtr
 *  0x10 framePtr
 *  0x14 nextInstrPt
 *  0x1c coreLoopFramePtr
 *  0x20 coreLoopStackPtr
 *
 * _VMSMasterEnv offsets:
 *  0x24 coreLoopReturnPt
 *  0x2c masterLock
 */
.globl switchToVP
switchToVP:
    movl    0x4(%esp) , %ecx            #get VirtProcr
    movl    %esp      , 0x20(%ecx)      #save core loop stack pointer
    movl    %ebp      , 0x1c(%ecx)      #save core loop frame pointer
    movl    0x0c(%ecx), %esp            #restore VP stack pointer
    movl    0x10(%ecx), %ebp            #restore VP frame pointer
    movl    0x14(%ecx), %eax            #get jump target (nextInstrPt)
    jmp     *%eax                       #jump to VP
coreLoopReturn:                         #VPs jump back to this address
    ret


//switches to core loop.
/*
 * switchToCoreLoop(VirtProcr *vp)
 * Switches from a VP back to the core loop; saves the VP's resume address.
 * Stores $VPReturn as the VP's next-instruction pointer, saves the VP's
 * stack and frame pointers, restores the core loop's stack and frame
 * pointers, then jumps to _VMSMasterEnv->coreLoopReturnPt.
 *
 * VirtProcr offsets:
 *  0x0c stackPtr
 *  0x10 framePtr
 *  0x14 nextInstrPt
 *  0x1c coreLoopFramePtr
 *  0x20 coreLoopStackPtr
 *
 * _VMSMasterEnv offsets:
 *  0x24 coreLoopReturnPt
 *  0x28 coreLoopEndPt
 *  0x2c masterLock
 */
.globl switchToCoreLoop
switchToCoreLoop:
    movl    0x4(%esp) , %ecx            #get VirtProcr
    movl    $VPReturn , 0x14(%ecx)      #store resume address for this VP
    movl    %esp      , 0x0c(%ecx)      #save VP stack pointer
    movl    %ebp      , 0x10(%ecx)      #save VP frame pointer
    movl    0x20(%ecx), %esp            #restore core loop stack pointer
    movl    0x1c(%ecx), %ebp            #restore core loop frame pointer
    movl    $_VMSMasterEnv, %ecx
    movl    (%ecx)    , %ecx            #ecx = *_VMSMasterEnv
    movl    0x24(%ecx), %eax            #get coreLoopReturnPt
    jmp     *%eax                       #jump back into the core loop
VPReturn:                               #switchToVP resumes this VP here
    ret


//switches to core loop from master.
/*
 * masterSwitchToCoreLoop(VirtProcr *vp)
 * Switches from the master loop back to the core loop; saves the master's
 * resume address.  Releases masterLock (after all reads of the master env)
 * so the next master loop invocation can be executed.
 *
 * VirtProcr offsets:
 *  0x0c stackPtr
 *  0x10 framePtr
 *  0x14 nextInstrPt
 *  0x1c coreLoopFramePtr
 *  0x20 coreLoopStackPtr
 *
 * _VMSMasterEnv offsets:
 *  0x24 coreLoopReturnPt
 *  0x2c masterLock
 */
.globl masterSwitchToCoreLoop
masterSwitchToCoreLoop:
    movl    0x4(%esp) , %ecx            #get VirtProcr
    movl    $MasterReturn, 0x14(%ecx)   #store resume address for the master
    movl    %esp      , 0x0c(%ecx)      #save master stack pointer
    movl    %ebp      , 0x10(%ecx)      #save master frame pointer
    movl    0x20(%ecx), %esp            #restore core loop stack pointer
    movl    0x1c(%ecx), %ebp            #restore core loop frame pointer
    movl    $_VMSMasterEnv, %ecx
    movl    (%ecx)    , %ecx            #ecx = *_VMSMasterEnv
    movl    0x24(%ecx), %eax            #get coreLoopReturnPt
    movl    $0x0      , 0x2c(%ecx)      #release masterLock
    jmp     *%eax                       #jump back into the core loop
MasterReturn:                           #switchToVP resumes the master here
    ret


//Switch to terminateCoreLoop.
// No call needed: the stack is already set up as for switchToVP and both
// functions take the same argument.
// Does not save the VP's registers because this function never returns
// to the VP.
/*
 * VirtProcr offsets:
 *  0x0c stackPtr
 *  0x10 framePtr
 *  0x14 nextInstrPt
 *  0x1c coreLoopFramePtr
 *  0x20 coreLoopStackPtr
 *
 * _VMSMasterEnv offsets:
 *  0x24 coreLoopReturnPt
 *  0x2c masterLock
 */
.globl asmTerminateCoreLoop
asmTerminateCoreLoop:
    movl    0x4(%esp) , %ecx            #get VirtProcr
    movl    0x20(%ecx), %esp            #restore core loop stack pointer
    movl    0x1c(%ecx), %ebp            #restore core loop frame pointer
    movl    $terminateCoreLoop, %eax
    jmp     *%eax                       #tail-jump to terminateCoreLoop


/*
 * asmTerminateCoreLoopSeq(VirtProcr *vp)
 * Special variant for the sequential version: discards the current (VP)
 * stack and returns directly from the core loop after VMS__dissipate_procr
 * was called.
 */
.globl asmTerminateCoreLoopSeq
asmTerminateCoreLoopSeq:
    movl    0x4(%esp) , %ecx            #get VirtProcr
    movl    0x20(%ecx), %esp            #restore core loop stack pointer
    movl    0x1c(%ecx), %ebp            #restore core loop frame pointer
    sub     $0x4      , %esp            #make room for one stack argument
    movl    %ecx      , (%esp)          #pass VirtProcr as the argument
    call    VMS__dissipate_procr
    movl    %ebp      , %esp            #unwind to the core loop's frame
    pop     %ebp                        #restore the caller's frame pointer
    ret                                 #return from the core loop