## diffname bitsy/l.s 2000/0831
## diff -e /dev/null /n/emeliedump/2000/0831/sys/src/9/bitsy/l.s
0a
#include "mem.h"
#include "arm7500.h"
#include "io.h"
/*
* Entered here from the boot loader with
* MMU, IDC and WB enabled.
*/
TEXT _startup(SB), $-4
MOVW $setR12(SB), R12
_main:
MOVW $(PsrDirq|PsrDfiq|PsrMsvc), R1 /* SVC mode, interrupts disabled */
MOVW R1, CPSR
MOVW $(MACHADDR+BY2PG), R13 /* stack */
SUB $4, R13 /* link */
BL main(SB)
_mainloop:
BEQ _mainloop
BNE _mainloop
BL _div(SB) /* loader botch */
BL _mainloop
TEXT mmuregr(SB), $-4
CMP $CpCPUID, R0
BNE _fsrr
MRC CpMMU, 0, R0, C(CpCPUID), C(0)
RET
_fsrr:
CMP $CpFSR, R0
BNE _farr
MRC CpMMU, 0, R0, C(CpFSR), C(0)
RET
_farr:
CMP $CpFAR, R0
BNE _ctlr
MRC CpMMU, 0, R0, C(CpFAR), C(0)
RET
_ctlr:
CMP $CpControl, R0
BNE _mmuregbad
MCR CpMMU, 0, R0, C(CpControl), C(0)
RET
_mmuregbad:
MOVW $-1, R0
RET
TEXT mmuregw(SB), $-4
CMP $CpControl, R0
BNE _ttbw
MOVW 4(FP), R0
MCR CpMMU, 0, R0, C(CpControl), C(0)
RET
_ttbw:
CMP $CpTTB, R0
BNE _dacw
MOVW 4(FP), R0
MCR CpMMU, 0, R0, C(CpTTB), C(0)
RET
_dacw:
CMP $CpDAC, R0
BNE _TLBflushw
MOVW 4(FP), R0
MCR CpMMU, 0, R0, C(CpDAC), C(0)
RET
_TLBflushw:
CMP $CpTLBflush, R0
BNE _TLBpurgew
MCR CpMMU, 0, R0, C(CpTLBflush), C(0)
RET
_TLBpurgew:
CMP $CpTLBpurge, R0
BNE _IDCflushw
MOVW 4(FP), R0
MCR CpMMU, 0, R0, C(CpTLBpurge), C(0)
RET
_IDCflushw:
CMP $CpIDCflush, R0
BNE _WBdrain
MCR CpMMU, 0, R0, C(CpIDCflush), C(CpIDCflush)
RET
_WBdrain:
CMP $CpWBdrain, R0
BNE _mmuregbad
MCR CpMMU, 4, R0, C(CpIDCflush), C(CpWBdrain), 4
RET
TEXT mmuttb(SB), $-4
MCR CpMMU, 0, R0, C(CpIDCflush), C(CpIDCflush)
MCR CpMMU, 0, R0, C(CpTLBflush), C(0)
MCR CpMMU, 0, R0, C(CpTTB), C(0)
RET
TEXT mmureset(SB), $-4
MOVW CPSR, R0
ORR $(PsrDfiq|PsrDirq), R0, R0
MOVW R0, CPSR
MOVW $0, R0
MOVW $(CpCsystem), R1
MCR CpMMU, 0, R1, C(CpControl), C(0)
B (R0)
TEXT setr13(SB), $-4
MOVW 4(FP), R1
MOVW CPSR, R2
BIC $PsrMask, R2, R3
ORR R0, R3
MOVW R3, CPSR
MOVW R13, R0
MOVW R1, R13
MOVW R2, CPSR
RET
TEXT vectors(SB), $-4
MOVW 0x18(R15), R15 /* reset */
MOVW 0x18(R15), R15 /* undefined */
MOVW 0x18(R15), R15 /* SWI */
MOVW 0x18(R15), R15 /* prefetch abort */
MOVW 0x18(R15), R15 /* data abort */
MOVW 0x18(R15), R15 /* reserved */
MOVW 0x18(R15), R15 /* IRQ */
MOVW 0x18(R15), R15 /* FIQ */
TEXT vtable(SB), $-4
WORD $_vsvc(SB) /* reset, in svc mode already */
WORD $_vund(SB) /* undefined, switch to svc mode */
WORD $_vsvc(SB) /* swi, in svc mode already */
WORD $_vpab(SB) /* prefetch abort, switch to svc mode */
WORD $_vdab(SB) /* data abort, switch to svc mode */
WORD $_vsvc(SB) /* reserved */
WORD $_virq(SB) /* IRQ, switch to svc mode */
WORD $_vfiq(SB) /* FIQ, switch to svc mode */
TEXT _vund(SB), $-4 /* undefined */
MOVM.IA [R0-R3], (R13)
MOVW $PsrMund, R0
B _vswitch
TEXT _vsvc(SB), $-4 /* reset or SWI or reserved */
SUB $12, R13
MOVW R14, 8(R13)
MOVW CPSR, R14
MOVW R14, 4(R13)
MOVW $PsrMsvc, R14
MOVW R14, (R13)
MOVW 8(R13), R14
B _vsaveu
TEXT _vpab(SB), $-4 /* prefetch abort */
MOVM.IA [R0-R3], (R13)
MOVW $PsrMabt, R0
B _vswitch
TEXT _vdab(SB), $-4 /* data abort */
MOVM.IA [R0-R3], (R13)
MOVW $(PsrMabt+1), R0
B _vswitch
TEXT _virq(SB), $-4 /* IRQ */
MOVM.IA [R0-R3], (R13)
MOVW $PsrMirq, R0
B _vswitch
TEXT _vfiq(SB), $-4 /* FIQ */
MOVM.IA [R0-R3], (R13)
MOVW $PsrMfiq, R0
B _vswitch
_vswitch: /* switch to svc, type in R0 */
MOVW SPSR, R1 /* psr for ureg */
MOVW R14, R2 /* saved pc for ureg */
MOVW R13, R3 /* [R0-R3] save area */
MOVW CPSR, R14 /* switch */
BIC $PsrMask, R14
ORR $(PsrDirq|PsrDfiq|PsrMsvc), R14
MOVW R14, CPSR
MOVM.DB.W [R0-R2], (R13) /* top of ureg */
MOVM.IA (R3), [R0-R3] /* restore [R0-R3] */
B _vsaveu
_vsaveu:
SUB $4, R13 /* save link */
MOVW R14, (R13)
MOVM.DB.W [R0-R14], (R13) /* save svc registers */
MOVW $setR12(SB), R12 /* safety */
MOVW R13, R0 /* argument is &ureg */
SUB $8, R13 /* space for argument+link */
BL exception(SB)
_vrfe:
ADD $(8+4*15), R13 /* [r0-R14]+argument+link */
MOVW (R13), R14 /* restore link */
MOVW 8(R13), R0 /* restore SPSR */
MOVW R0, SPSR
MOVM.DB (R13), [R0-R14] /* restore registers */
ADD $12, R13 /* skip saved link+type+SPSR */
RFE /* MOVM.IA.S.W (R13), [R15] */
TEXT splhi(SB), $-4
MOVW CPSR, R0
ORR $(PsrDfiq|PsrDirq), R0, R1
MOVW R1, CPSR
RET
TEXT spllo(SB), $-4
MOVW CPSR, R0
BIC $(PsrDfiq|PsrDirq), R0, R1
MOVW R1, CPSR
RET
TEXT splx(SB), $-4
MOVW R0, R1
MOVW CPSR, R0
MOVW R1, CPSR
RET
TEXT islo(SB), $-4
MOVW CPSR, R0
AND $(PsrDfiq|PsrDirq), R0
EOR $(PsrDfiq|PsrDirq), R0
RET
TEXT _exit(SB), $-4
MRC CpMMU, 0, R1, C(CpControl), C(0), 0 /* Read MMUCR */
BIC $MMUCR_M_ENABLE, R1 /* Clear MMU Enable bit */
MCR CpMMU, 0, R1, C(CpControl), C(0), 0 /* Write to MMU CR */
MCR CpMMU, 0, R1, C(CpIDCflush), C(7) /* Flush (inval) I,D-cache */
B (R0)
TEXT cpsrr(SB), $-4
MOVW CPSR, R0
RET
TEXT spsrr(SB), $-4
MOVW SPSR, R0
RET
TEXT aamloop(SB), $-4 /* 3 */
_aamloop:
MOVW R0, R0 /* 1 */
MOVW R0, R0 /* 1 */
MOVW R0, R0 /* 1 */
SUB $1, R0 /* 1 */
CMP $0, R0 /* 1 */
BNE _aamloop /* 3 */
RET /* 3 */
TEXT getcallerpc(SB), $-4
MOVW 0(R13), R0
RET
TEXT tas(SB), $-4
MOVW R0, R1
MOVW $0xDEADDEAD, R2
SWPW R2, (R1), R0
RET
TEXT setlabel(SB), $-4
MOVW R13, 0(R0) /* sp */
MOVW R14, 4(R0) /* pc */
MOVW $0, R0
RET
TEXT gotolabel(SB), $-4
MOVW 0(R0), R13 /* sp */
MOVW 4(R0), R14 /* pc */
MOVW $1, R0
RET
TEXT mmuctlregr(SB), $-4
MRC CpMMU, 0, R0, C(CpControl), C(0)
RET
TEXT mmuctlregw(SB), $-4
MCR CpMMU, 0, R0, C(CpControl), C(0)
MOVW R0, R0
MOVW R0, R0
RET
TEXT flushIcache(SB), $-4
MCR CpMMU, 0, R0, C(CpCacheCtl), C(5), 0
MOVW R0,R0
MOVW R0,R0
MOVW R0,R0
MOVW R0,R0
RET
TEXT cleanDentry(SB), $-4
MCR CpMMU, 0, R0, C(CpCacheCtl), C(10), 1
RET
TEXT flushDentry(SB), $-4
MCR CpMMU, 0, R0, C(CpCacheCtl), C(6), 1
RET
TEXT drainWBuffer(SB), $-4
MCR CpMMU, 0, R0, C(CpCacheCtl), C(10), 4
RET
TEXT writeBackDC(SB), $-4
MOVW $0xE0000000, R0
MOVW $8192, R1
ADD R0, R1
wbflush:
MOVW.P.W 32(R0), R2
CMP R1,R0
BNE wbflush
RET
TEXT flushDcache(SB), $-4
MCR CpMMU, 0, R0, C(CpCacheCtl), C(6), 0
RET
TEXT writeBackBDC(SB), $-4
MOVW $0xE4000000, R0
MOVW $0x200, R1
ADD R0, R1
wbbflush:
MOVW.P.W 32(R0), R2
CMP R1,R0
BNE wbbflush
MCR CpMMU, 0, R0, C(CpCacheCtl), C(10), 4
MOVW R0,R0
MOVW R0,R0
MOVW R0,R0
MOVW R0,R0
RET
TEXT flushIDC(SB), $-4
/*BUG*/
BL drainWBuffer(SB)
BL writeBackDC(SB)
BL flushDcache(SB)
BL flushIcache(SB)
RET
.
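The 2000/0831 file already carries the kernel's low-level toolkit: trap plumbing, the spl* family, tas, setlabel/gotolabel. tas swaps the marker 0xDEADDEAD into the word addressed by R0 and returns the word's previous value, which is exactly the primitive a spinlock needs. A minimal C sketch of a lock built on it; the Lock type and the spin policy are illustrative, not the kernel's actual lock code:

typedef unsigned long ulong;

extern ulong tas(ulong*);	/* l.s: atomically swap in 0xDEADDEAD, return old value */

typedef struct Lock Lock;
struct Lock {
	ulong	key;	/* 0 when free, 0xDEADDEAD when held */
};

void
lock(Lock *l)
{
	while(tas(&l->key) != 0)
		;	/* old value nonzero: someone else holds it; spin */
}

void
unlock(Lock *l)
{
	l->key = 0;	/* a plain store releases the lock */
}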
## diffname bitsy/l.s 2000/0901
## diff -e /n/emeliedump/2000/0831/sys/src/9/bitsy/l.s /n/emeliedump/2000/0901/sys/src/9/bitsy/l.s
296,354d
240,246d
79,113d
73,77c
/* st the translation table base */
TEXT setttb(SB), $-4
MCR CpMMU, 0, R0, C(CpTTB), C(0x0)
.
66,70c
/* return fault address */
TEXT getfar(SB), $-4
MRC CpMMU, 0, R0, C(CpFAR), C(0x0)
.
59,63c
/* return fault status */
TEXT getfsr(SB), $-4
MRC CpMMU, 0, R0, C(CpFSR), C(0x0)
.
52,56c
/* return cpu id */
TEXT getcpuid(SB), $-4
MRC CpMMU, 0, R0, C(CpControl), C(0x0)
.
48,49c
/* drain write buffer */
TEXT drainwb(SB), $-4
MCR CpMMU, 0, R0, C(CpCacheFlush), C(0x0), 4
.
42,45c
/* flush i and d caches */
TEXT flushcache(SB), $-4
MCR CpMMU, 0, R0, C(CpCacheFlush), C(0x0)
/* drain prefetch */
MOVW R0,R0
MOVW R0,R0
MOVW R0,R0
MOVW R0,R0
.
36,39c
/* flush data cache */
TEXT flushdcache(SB), $-4
MCR CpMMU, 0, R0, C(CpCacheFlush), C(0x0)
.
30,33c
/* flush instruction cache */
TEXT flushicache(SB), $-4
MCR CpMMU, 0, R0, C(CpCacheFlush), C(0x0)
/* drain prefetch */
MOVW R0,R0
MOVW R0,R0
MOVW R0,R0
MOVW R0,R0
.
24,27c
/* flush tlb's */
TEXT flushmmu(SB), $-4
MCR CpMMU, 0, R0, C(CpTLBFlush), C(0x0)
.
19,22c
B _mainloop
.
17c
BL exit(SB)
/* we shouldn't get here */
.
13a
/* turn on caches and write buffer */
MRC CpMMU, 0, R1, C(CpControl), C(0x0)
ORR $(CpCdcache|CpCwb), R1
MCR CpMMU, 0, R1, C(CpControl), C(0x0)
.
12c
/* SVC mode, interrupts disabled */
MOVW $(PsrDirq|PsrDfiq|PsrMsvc), R1
.
9,10c
TEXT _start(SB), $-4
MOVW $setR12(SB), R12 /* load the SB */
.
6,7c
* Entered here from Compaq's bootldr with MMU disabled.
.
2c
#include "sa1110.h"
.
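The 2000/0901 rewrite targets the SA-1110 and exposes the fault registers through getfsr/getfar. A data-abort handler reads both to learn which virtual address faulted and why; a hedged sketch of that pattern in C (the field masks follow the ARM FSR layout; the handler name and the diagnostic print are illustrative):

typedef unsigned long ulong;

extern ulong getfsr(void);	/* fault status register, from l.s */
extern ulong getfar(void);	/* fault address register, from l.s */
extern int print(char*, ...);

void
dataabort_sketch(void)
{
	ulong fsr, far;

	fsr = getfsr();
	far = getfar();
	/* bits 0-3 of the FSR encode the fault type, bits 4-7 the domain */
	print("data abort: va %#lux status %#lux domain %lud\n",
		far, fsr & 0xf, (fsr>>4) & 0xf);
}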
## diffname bitsy/l.s 2000/0902
## diff -e /n/emeliedump/2000/0901/sys/src/9/bitsy/l.s /n/emeliedump/2000/0902/sys/src/9/bitsy/l.s
196a
MOVW R0, R1
MOVW CPSR, R0
MOVW R1, CPSR
RET
TEXT splxpc(SB), $0 /* for iunlock */
.
180c
MOVM.DB.S (R13), [R0-R14] /* restore registers */
.
170c
MOVW $setR12(SB), R12 /* the SB from user mode is different */
.
168c
MOVM.DB.W.S [R0-R14], (R13) /* save svc registers */
.
164a
/* push the registers as in ureg */
.
24a
BL _div(SB) /* hack to get _div etc loaded */
.
19a
/* turn off interrupts */
BL splhi(SB)
.
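This revision starts calling splhi from the boot path and adds splxpc for iunlock. The spl routines return the previous PSR, so interrupt disabling nests; the C idiom is to save the old level around a critical section and put it back. A sketch under that assumption (the function body is illustrative):

typedef unsigned long ulong;

extern ulong splhi(void);	/* disable interrupts, return previous PSR */
extern void splx(ulong);	/* restore a PSR saved by splhi/spllo */

void
critical_sketch(void)
{
	ulong s;

	s = splhi();	/* interrupts off, remembering the caller's level */
	/* ... touch state shared with interrupt handlers ... */
	splx(s);	/* back to whatever level the caller had */
}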
## diffname bitsy/l.s 2000/0903
## diff -e /n/emeliedump/2000/0902/sys/src/9/bitsy/l.s /n/emeliedump/2000/0903/sys/src/9/bitsy/l.s
180,186c
_vrfe:
ADD $(8+4*15), R13 /* r13 points to ureg->type */
MOVW 8(R13), R14 /* restore link */
MOVW 4(R13), R0 /* restore SPSR */
MOVW R0, SPSR /* ... */
MOVM.DB.S (R13), [R0-R14] /* restore registers */
ADD $8, R13 /* skip saved type+SPSR */
.
176c
MOVW R13, R0 /* first arg is pointer to ureg */
.
171,173c
MOVM.DB.W.S [R0-R14], (R13) /* save user level registers, r13 points to ureg */
.
169c
/*
* come here with R13 pointing to ureg->type
*/
.
165,166c
MOVM.DB.W [R0-R2], (R13) /* set ureg->{pc, psr, type}; r13 points to ureg->type */
MOVM.IA (R3), [R0-R3] /* restore [R0-R3] from previous mode's stack */
.
160c
/* switch to svc mode, we get new R13 pointing to top of svc stack */
MOVW CPSR, R14
.
156,157c
MOVW SPSR, R1 /* SPSR for ureg */
MOVW R14, R2 /* interrupted pc for ureg */
.
154a
/*
* come here with type in R0 and R13 pointing above saved [r0-r3]
*/
.
151,152c
MOVM.IA [R0-R3], (R13) /* free some working space */
MOVW $PsrMfiq, R0 /* r0 = type */
.
146,147c
MOVM.IA [R0-R3], (R13) /* free some working space */
MOVW $PsrMirq, R0 /* r0 = type */
.
141,143c
MOVM.IA [R0-R3], (R13) /* free some working space */
MOVW $(PsrMabt+1), R0 /* r0 = type */
B _vswitch
.
136,137c
MOVM.IA [R0-R3], (R13) /* free some working space */
MOVW $PsrMabt, R0 /* r0 = type */
.
126,132c
SUB $12, R13 /* make room for pc, psr, & type */
MOVW R14, 8(R13) /* ureg->pc = interrupted PC */
MOVW SPSR, R14 /* ureg->psr = SPSR */
MOVW R14, 4(R13) /* ... */
MOVW $PsrMsvc, R14 /* ureg->type = PsrMsvc */
MOVW R14, (R13) /* ... */
.
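The save sequence introduced here stacks fifteen general registers and then the type, psr and pc words, with R13 left pointing at the base; the C side must declare a struct whose field order matches that stacking exactly (4*15 bytes to type, +4 to psr, +8 to pc, as the _vrfe arithmetic shows). A sketch consistent with those offsets; the authoritative declaration lives in the port's ureg.h:

typedef unsigned long ulong;

/* field order must mirror the MOVM stores in _vsvc/_vswitch */
typedef struct Ureg Ureg;
struct Ureg {
	ulong	r0, r1, r2, r3, r4, r5, r6;
	ulong	r7, r8, r9, r10, r11, r12;
	ulong	r13;	/* sp at the time of the trap */
	ulong	r14;	/* link at the time of the trap */
	ulong	type;	/* exception type */
	ulong	psr;	/* saved PSR */
	ulong	pc;	/* interrupted pc */
};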
## diffname bitsy/l.s 2000/0904
## diff -e /n/emeliedump/2000/0903/sys/src/9/bitsy/l.s /n/emeliedump/2000/0904/sys/src/9/bitsy/l.s
190c
ADD $8, R13 /* pop past ureg->{type+psr} */
.
185c
ADD $(8+4*15), R13 /* make r13 point to ureg->type */
.
178,179c
/*
* call the exception routine, the ureg is at the bottom of the stack
*/
_vexcep:
MOVW $setR12(SB), R12 /* Make sure we've got the kernel's SB loaded */
.
175,176c
MOVW 0x40(R13), R1
AND.S $0xf, R1
MOVW.NE R14,0x38(R13)
B _vexcep
.
172,173c
/*
* if the original interrupt happened while executing SVC mode, the User R14 in the Ureg is
* wrong. We need to save the SVC one there.
.
170c
MOVM.DB.W.S [R0-R14], (R13) /* save user level registers, at end r13 points to ureg */
.
167a
/*
* R13 and R14 is now R13_SVC and R14_SVC. The values of the previous mode's
* R13 and R14 are no longer accessible. That's why R3 was left to point to where
* the old [r0-r3] are stored.
*/
.
162c
/* switch to svc mode */
.
158,160c
MOVW SPSR, R1 /* save SPSR for ureg */
MOVW R14, R2 /* save interrupted pc for ureg */
MOVW R13, R3 /* save pointer to where the original [R0-R3] are */
.
153a
TEXT _vfiq(SB), $-4 /* FIQ */
RFE /* FIQ is special, ignore it for now */
.
149,151c
TEXT _virq(SB), $-4 /* IRQ */
MOVM.IA [R0-R3], (R13) /* free some working space */
MOVW $PsrMirq, R0 /* r0 = type */
.
139,146c
TEXT _vdab(SB), $-4 /* data abort */
MOVM.IA [R0-R3], (R13) /* free some working space */
MOVW $(PsrMabt+1), R0 /* r0 = type */
.
125,136c
TEXT _vpab(SB), $-4 /* prefetch abort */
MOVM.IA [R0-R3], (R13) /* free some working space */
MOVW $PsrMabt, R0 /* r0 = type */
.
120,121c
TEXT _vund(SB), $-4 /* undefined */
MOVM.IA [R0-R3], (R13) /* free some working space */
.
110,118c
TEXT _vsvc(SB), $-4 /* reset or SWI or reserved */
SUB $12, R13 /* make room for pc, psr, & type */
MOVW R14, 8(R13) /* ureg->pc = interrupted PC */
MOVW SPSR, R14 /* ureg->psr = SPSR */
MOVW R14, 4(R13) /* ... */
MOVW $PsrMsvc, R14 /* ureg->type = PsrMsvc */
MOVW R14, (R13) /* ... */
MOVM.DB.W.S [R0-R14], (R13) /* save user level registers, at end r13 points to ureg */
B _vexcep /* call the exception handler */
.
101,108c
MOVW 0x18(R15), R15 /* reset */
MOVW 0x18(R15), R15 /* undefined */
MOVW 0x18(R15), R15 /* SWI */
MOVW 0x18(R15), R15 /* prefetch abort */
MOVW 0x18(R15), R15 /* data abort */
MOVW 0x18(R15), R15 /* reserved */
MOVW 0x18(R15), R15 /* IRQ */
MOVW 0x18(R15), R15 /* FIQ */
WORD $_vsvc(SB) /* reset, in svc mode already */
WORD $_vund(SB) /* undefined, switch to svc mode */
WORD $_vsvc(SB) /* swi, in svc mode already */
WORD $_vpab(SB) /* prefetch abort, switch to svc mode */
WORD $_vdab(SB) /* data abort, switch to svc mode */
WORD $_vsvc(SB) /* reserved */
WORD $_virq(SB) /* IRQ, switch to svc mode */
WORD $_vfiq(SB) /* FIQ, switch to svc mode */
.
99a
/*
* exception vectors, copied by trapinit() to somewhere useful
*/
.
85a
/*
* set the stack value for the mode passed in R0
*/
.
82c
/* set the translation table base */
.
30a
BL _div(SB) /* hack to get _div etc loaded */
.
28d
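Each slot of vectors(SB) is MOVW 0x18(R15), R15: an ARM PC reads as the instruction's address plus 8, so offset 0x18 reaches 0x20 bytes past the slot, which is the matching word of vtable(SB) sitting right after the eight vectors. That means trapinit() only has to copy the two tables to the vector base back to back. A sketch of that copy; the routine name comes from the comment above, while the base address argument and the memmove prototype are assumptions:

typedef unsigned long ulong;

extern void vectors(void);	/* eight PC-relative jumps, l.s */
extern void vtable(void);	/* eight handler addresses, l.s */
extern void* memmove(void*, void*, ulong);

enum { Nvec = 8 };

void
trapinit_sketch(void *vecbase)
{
	/* the tables must stay adjacent: slot i loads the word
	 * 0x20 bytes after itself, i.e. its own vtable entry */
	memmove(vecbase, (void*)vectors, Nvec*4);
	memmove((char*)vecbase + Nvec*4, (void*)vtable, Nvec*4);
}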
## diffname bitsy/l.s 2000/0905
## diff -e /n/emeliedump/2000/0904/sys/src/9/bitsy/l.s /n/emeliedump/2000/0905/sys/src/9/bitsy/l.s
20,21c
/* drain write buffer */
MCR CpMMU, 0, R0, C(CpCacheFlush), C(0x0), 4
/* disable the MMU */
MOVW $0x130, R1
MCR CpMMU, 0, R1, C(CpControl), C(0x0)
.
15,18c
/* flush TLB's */
MCR CpMMU, 0, R0, C(CpCacheFlush), C(0x0)
/* drain prefetch */
MOVW R0,R0
MOVW R0,R0
MOVW R0,R0
MOVW R0,R0
.
## diffname bitsy/l.s 2000/0906
## diff -e /n/emeliedump/2000/0905/sys/src/9/bitsy/l.s /n/emeliedump/2000/0906/sys/src/9/bitsy/l.s
91a
/* set the domain access control */

TEXT putdac(SB), $-4
MCR CpMMU, 0, R0, C(CpDAC), C(0x0)
/* set the translation table base */
TEXT putpid(SB), $-4
MCR CpMMU, 0, R0, C(CpPID), C(0x0)
.
90c
TEXT putttb(SB), $-4
.
2,3d
## diffname bitsy/l.s 2000/0928
## diff -e /n/emeliedump/2000/0906/sys/src/9/bitsy/l.s /n/emeliedump/2000/0928/sys/src/9/bitsy/l.s
238c
TEXT splxpc(SB), $-4 /* for iunlock */
.
## diffname bitsy/l.s 2000/0929
## diff -e /n/emeliedump/2000/0928/sys/src/9/bitsy/l.s /n/emeliedump/2000/0929/sys/src/9/bitsy/l.s
209c
BL trap(SB)
.
119c
TEXT exceptionvectors(SB), $-4
.
97a
RET
.
95c
/* set address translation pid */
.
93a
RET
.
90a
/*
* enable mmu, i and d caches, and exception vectors at 0xffff0000
*/
TEXT mmuenable(SB), $-4
MRC CpMMU, 0, R0, C(CpControl), C(0x0)
ORR $(CpCmmuena|CpCdcache|CpCicache|CpCvivec), R0
MCR CpMMU, 0, R0, C(CpControl), C(0x0)
RET
TEXT mmudisable(SB), $-4
MRC CpMMU, 0, R0, C(CpControl), C(0x0)
BIC $(CpCmmuena|CpCdcache|CpCicache|CpCvivec), R0
MCR CpMMU, 0, R0, C(CpControl), C(0x0)
RET
.
89a
RET
.
74c
MRC CpMMU, 0, R0, C(CpCPUID), C(0x0)
.
68c
TEXT wbflush(SB), $-4
.
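mmuenable just ORs the enable bits into the control register, so the translation table, domain register and TLBs have to be in order before it runs; putttb, putdac and flushmmu above are exactly those steps. A sketch of the natural bring-up order (the DAC value and the wrapper routine are illustrative):

typedef unsigned long ulong;

extern void putttb(ulong);	/* translation table base, l.s */
extern void putdac(ulong);	/* domain access control, l.s */
extern void flushmmu(void);	/* invalidate TLBs, l.s */
extern void mmuenable(void);	/* set CpCmmuena and cache bits, l.s */

void
mmuinit_sketch(ulong ttb)
{
	putttb(ttb);	/* level-1 table; the ARM requires it 16K-aligned */
	putdac(1);	/* domain 0 as client: permissions are checked */
	flushmmu();	/* no stale translations when the bit goes on */
	mmuenable();
}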
## diffname bitsy/l.s 2000/1001
## diff -e /n/emeliedump/2000/0929/sys/src/9/bitsy/l.s /n/emeliedump/2000/1001/sys/src/9/bitsy/l.s
234c
MOVM.DB.W.S (R13), [R0-R14] /* restore registers */
.
212,213c
* if the original interrupt happened while executing SVC mode,
* the User R14 in the Ureg is wrong. We need to save the SVC one there.
.
207c
MOVM.DB.W [R0-R2], (R13) /* set ureg->{type, psr, pc}; r13 points to ureg->type */
.
175,179d
170c
TEXT _vabt(SB), $-4 /* prefetch abort */
.
155c
TEXT _vrst(SB), $-4
BL reset
TEXT _vsvc(SB), $-4 /* SWI */
.
149,151c
WORD $_vabt(SB) /* prefetch abort, switch to svc mode */
WORD $_vabt(SB) /* data abort, switch to svc mode */
WORD $_vrst(SB) /* reserved, shouldn't happen */
.
146c
WORD $_vrst(SB) /* reset, in svc mode already */
.
## diffname bitsy/l.s 2000/1002
## diff -e /n/emeliedump/2000/1001/sys/src/9/bitsy/l.s /n/emeliedump/2000/1002/sys/src/9/bitsy/l.s
232c
MOVM.DB.S (R13), [R0-R14] /* restore registers */
ADD $8, R13 /* pop past ureg->{type+psr} */
RFE /* MOVM.IA.S.W (R13), [R15] */
TEXT _vfiq(SB), $-4 /* FIQ */
RFE /* FIQ is special, ignore it for now */
TEXT forkret(SB),$-4
ADD $(4*15), R13 /* make r13 point to ureg->type */
MOVW 8(R13), R14 /* restore link */
MOVW 4(R13), R0 /* restore SPSR */
MOVW R0, SPSR /* ... */
MOVM.DB.S (R13), [R0-R14] /* restore registers */
.
227d
224c
SUB $8, R13 /* space for argument+link (for debugger) */
.
218,221c
ADD $(8+4*15), R13 /* make r13 point to ureg->type */
MOVW 8(R13), R14 /* restore link */
MOVW 4(R13), R0 /* restore SPSR */
MOVW R0, SPSR /* ... */
MOVM.DB (R13), [R0-R14] /* restore registers */
ADD $8, R13 /* pop past ureg->{type+psr} */
RFE /* MOVM.IA.S.W (R13), [R15] */
/* here for trap from USER mode */
_userexcep:
MOVM.DB.W [R0-R2], (R13) /* set ureg->{type, psr, pc}; r13 points to ureg->type */
MOVM.IA (R3), [R0-R4] /* restore [R0-R4] from previous mode's stack */
MOVM.DB.W.S [R0-R14], (R13) /* save kernel level registers, at end r13 points to ureg */
.
209,216c
BL trap(SB)
.
206,207c
MOVM.IA (R3), [R0-R4] /* restore [R0-R4] from previous mode's stack */
MOVM.DB.W [R0-R14], (R13) /* save kernel level registers, at end r13 points to ureg */
MOVW $setR12(SB), R12 /* Make sure we've got the kernel's SB loaded */
MOVW R13, R0 /* first arg is pointer to ureg */
SUB $8, R13 /* space for argument+link (for debugger) */
.
200,204c
/* interrupted code kernel or user? */
AND.S $0xf, R1, R4
B.EQ _userexcep
/* here for trap from SVC mode */
.
189c
_vswitch:
.
187c
 * come here with type in R0 and R13 pointing above saved [r0-r4].
 * we'll switch to SVC mode and then call trap.
.
183,185d
179c
MOVM.IA [R0-R4], (R13) /* free some working space */
.
174c
MOVM.IA [R0-R4], (R13) /* free some working space */
.
169c
MOVM.IA [R0-R4], (R13) /* free some working space */
.
167a
BL syscall(SB)
ADD $(8+4*15), R13 /* make r13 point to ureg->type */
MOVW 8(R13), R14 /* restore link */
MOVW 4(R13), R0 /* restore SPSR */
MOVW R0, SPSR /* ... */
MOVM.DB.S (R13), [R0-R14] /* restore registers */
ADD $8, R13 /* pop past ureg->{type+psr} */
RFE /* MOVM.IA.S.W (R13), [R15] */
.
166c
MOVW $setR12(SB), R12 /* Make sure we've got the kernel's SB loaded */
MOVW R13, R0 /* first arg is pointer to ureg */
SUB $8, R13 /* space for argument+link */
.
164c
MOVW.DB.W R14, (R13) /* ... */
.
162c
MOVW.DB.W R14, (R13) /* ... */
.
159,160c
MOVW.DB.W R14, (R13) /* ureg->pc = interrupted PC */
.
156c
BL reset(SB)
.
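The new kernel/user split keys off AND.S $0xf of the saved PSR: user mode is 0x10, the only mode whose low nibble is zero, while the privileged modes (0x12 irq, 0x13 svc, 0x17 abt, 0x1b und) all leave a nonzero residue under the mask. The same predicate in C, as a one-line sketch (the macro name is an assumption):

typedef unsigned long ulong;

/* usr=0x10 is the only PSR mode with a zero low nibble */
#define USERMODE(psr)	(((psr) & 0xf) == 0)

int
fromuser_sketch(ulong spsr)
{
	return USERMODE(spsr);	/* mirrors AND.S $0xf / BEQ _userexcep */
}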
## diffname bitsy/l.s 2000/1007
## diff -e /n/emeliedump/2000/1002/sys/src/9/bitsy/l.s /n/emeliedump/2000/1007/sys/src/9/bitsy/l.s
137,153c
TEXT vectors(SB), $-4
MOVW 0x18(R15), R15 /* reset */
MOVW 0x18(R15), R15 /* undefined */
MOVW 0x18(R15), R15 /* SWI */
MOVW 0x18(R15), R15 /* prefetch abort */
MOVW 0x18(R15), R15 /* data abort */
MOVW 0x18(R15), R15 /* reserved */
MOVW 0x18(R15), R15 /* IRQ */
MOVW 0x18(R15), R15 /* FIQ */
TEXT vtable(SB), $-4
WORD $_vsvc(SB) /* reset, in svc mode already */
WORD $_vund(SB) /* undefined, switch to svc mode */
WORD $_vsvc(SB) /* swi, in svc mode already */
WORD $_vabt(SB) /* prefetch abort, switch to svc mode */
WORD $_vabt(SB) /* data abort, switch to svc mode */
WORD $_vsvc(SB) /* reserved */
WORD $_virq(SB) /* IRQ, switch to svc mode */
WORD $_vfiq(SB) /* FIQ, switch to svc mode */
.
106a
/*
* use exception vectors at 0xffff0000
*/
TEXT mappedIvecEnable(SB), $-4
MRC CpMMU, 0, R0, C(CpControl), C(0x0)
ORR $(CpCvivec), R0
MCR CpMMU, 0, R0, C(CpControl), C(0x0)
RET
TEXT mappedIvecDisable(SB), $-4
MRC CpMMU, 0, R0, C(CpControl), C(0x0)
BIC $(CpCvivec), R0
MCR CpMMU, 0, R0, C(CpControl), C(0x0)
RET
.
97c
ORR $(CpCmmuena|CpCdcache|CpCicache), R0
.
93c
* enable mmu, i and d caches
.
69c
MCR CpMMU, 0, R0, C(CpCacheFlush), C(0xa), 4
.
59c
MCR CpMMU, 0, R0, C(CpCacheFlush), C(0x7), 0
.
54c
MCR CpMMU, 0, R0, C(CpCacheFlush), C(0x6), 0
.
44c
MCR CpMMU, 0, R0, C(CpCacheFlush), C(0x5), 0
.
39c
MCR CpMMU, 0, R0, C(CpTLBFlush), C(0x7)
.
24,27d
22c
MCR CpMMU, 0, R0, C(CpCacheFlush), C(0xa), 4
.
13,15c
/* disable the MMU */
MOVW $0x130, R1
MCR CpMMU, 0, R1, C(CpControl), C(0x0)
/* flush caches */
MCR CpMMU, 0, R0, C(CpCacheFlush), C(0x7), 0
/* drain prefetch */
.
## diffname bitsy/l.s 2000/1010
## diff -e /n/emeliedump/2000/1007/sys/src/9/bitsy/l.s /n/emeliedump/2000/1010/sys/src/9/bitsy/l.s
228c
BEQ _userexcep
.
59a
.
58a
/* write back any dirty data */
MOVW $0xe0000000,R0
ADD $(8*1024),R0,R1
_wbloop:
MOVW.W 32(R0),R2
CMP R0,R1
BNE _wbloop
/* flush cache contents */
.
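The StrongARM has no single "clean entire dcache" operation, so the new _wbloop forces writeback by reading a reserved 8K window at 0xe0000000 one 32-byte line at a time, displacing every dirty line on the way out. The same trick in C, assuming the constants used above; the volatile pointer keeps a compiler from deleting the otherwise dead reads:

typedef unsigned long ulong;

#define Flushbase	0xe0000000UL	/* reserved flush window used above */

enum {
	Cachesize = 8*1024,	/* SA-1110 main data cache */
	Linesize = 32,
};

void
cachewb_sketch(void)
{
	volatile ulong *p, *e;

	p = (ulong*)Flushbase;
	e = (ulong*)(Flushbase + Cachesize);
	/* touching a full cache worth of lines evicts, and so
	 * writes back, everything that was dirty */
	for(; p < e; p += Linesize/sizeof(ulong))
		(void)*p;
}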
## diffname bitsy/l.s 2000/1012
## diff -e /n/emeliedump/2000/1010/sys/src/9/bitsy/l.s /n/emeliedump/2000/1012/sys/src/9/bitsy/l.s
279a
/*
* This is the first jump from kernel to user mode.
* Fake a return from interrupt.
*
* Enter with R0 containing the user stack pointer.
* UTZERO + 0x20 is always the entry point.
*
*/
TEXT touser(SB),$-4
/* store the user stack pointer into the USR_r13 */
MOVM.DB.W [R0], (R13)
MOVM.S.IA.W (R13),[R13]
/* set up a PSR for user level */
MOVW $(PsrMusr), R0
MOVW R0,SPSR
/* save the PC on the stack */
MOVW $(UTZERO+0x20), R0
MOVM.DB.W [R0],(R13)
/* return from interrupt */
RFE /* MOVM.IA.S.W (R13), [R15] */
/*
* here to jump to a newly forked process
*/
.
215a
TEXT _vdabt(SB), $-4 /* prefetch abort */
MOVM.IA [R0-R4], (R13) /* free some working space */
MOVW $(PsrMabt+1), R0 /* r0 = type */
B _vswitch
.
211c
TEXT _vpabt(SB), $-4 /* prefetch abort */
.
176,177c
WORD $_vpabt(SB) /* prefetch abort, switch to svc mode */
WORD $_vdabt(SB) /* data abort, switch to svc mode */
.
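touser never returns: it parks the user stack pointer in the USR-mode R13, plants a user PSR and UTZERO+0x20 in an interrupt frame, and RFEs into it. Its one natural caller is the point where a freshly built first process drops to user level; a sketch of that shape (the surrounding routine is illustrative, not the actual process-startup code):

extern void touser(void*);	/* l.s: fake a return from interrupt */

void
init0_sketch(void *usersp)
{
	/* ... text already copied to UTZERO, stack built at usersp ... */
	touser(usersp);	/* enters user mode at UTZERO+0x20; never returns */
}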
## diffname bitsy/l.s 2000/1013
## diff -e /n/emeliedump/2000/1012/sys/src/9/bitsy/l.s /n/emeliedump/2000/1013/sys/src/9/bitsy/l.s
390,399d
113c
BIC $(CpCmmuena|CpCvivec), R0
.
107c
ORR $(CpCmmuena), R0
.
75d
42,56d
## diffname bitsy/l.s 2000/1014
## diff -e /n/emeliedump/2000/1013/sys/src/9/bitsy/l.s /n/emeliedump/2000/1014/sys/src/9/bitsy/l.s
174c
MOVW.W R14, -4(R13) /* ... */
.
172c
MOVW.W R14, -4(R13) /* ... */
.
170c
MOVW.W R14, -4(R13) /* ureg->pc = interrupted PC */
.
80a
/* return fault address */
TEXT putfar(SB), $-4
MRC CpMMU, 0, R0, C(CpFAR), C(0x0)
RET
.
60a
/* clean a single virtual address */
TEXT cleanaddr(SB), $-4
MCR CpMMU, 0, R0, C(CpCacheFlush), C(0xa), 1
RET
/* flush i and d caches */
TEXT flushcache(SB), $-4
/* flush cache contents */
MCR CpMMU, 0, R0, C(CpCacheFlush), C(0x7), 0
/* drain prefetch */
MOVW R0,R0
MOVW R0,R0
MOVW R0,R0
MOVW R0,R0
RET
.
59a
RET
.
42,43c
/* clean and flush i and d caches */
TEXT cleancache(SB), $-4
.
## diffname bitsy/l.s 2000/1015
## diff -e /n/emeliedump/2000/1014/sys/src/9/bitsy/l.s /n/emeliedump/2000/1015/sys/src/9/bitsy/l.s
120c
BIC $(CpCmmuena|CpCdcache|CpCwb|CpCvivec), R0
.
114c
ORR $(CpCmmuena|CpCwb|CpCdcache), R0
.
67,78d
63c
TEXT cacheflushaddr(SB), $-4
.
52c
/* drain write buffer and flush i&d cache contents */
MCR CpMMU, 0, R0, C(CpCacheFlush), C(0xa), 4
.
42,43c
/* write back and invalidate i and d caches */
TEXT cacheflush(SB), $-4
.
38c
TEXT mmuinvalidate(SB), $-4
.
## diffname bitsy/l.s 2000/1018
## diff -e /n/emeliedump/2000/1015/sys/src/9/bitsy/l.s /n/emeliedump/2000/1018/sys/src/9/bitsy/l.s
103c
ORR $(CpCmmuena|CpCdcache|CpCwb), R0
.
69a
_wbflush:
.
67a
/* invalidate the icache */
TEXT icacheinvalidate(SB), $-4
MCR CpMMU, 0, R0, C(CpCacheFlush), C(0x9)
RET
.
65a
B _wbflush
/* write back a region of cache lines */
TEXT cachewbregion(SB), $-4
MOVW 4(FP),R1
BIC $31,R0
ADD R0,R1
ADD $32,R1
_cfrloop:
MCR CpMMU, 0, R0, C(CpCacheFlush), C(0xa), 1
ADD $32,R0
CMP.S R0,R1
BNE _cfrloop
B _wbflush
/* invalidate the dcache */
TEXT dcacheinvalidate(SB), $-4
MCR CpMMU, 0, R0, C(CpCacheFlush), C(0x6)
.
63,64c
/* write back and invalidate i and d caches */
TEXT cachewb(SB), $-4
/* write back any dirty data */
MOVW $0xe0000000,R0
ADD $(8*1024),R0,R1
_cwbloop:
MOVW.P 32(R0),R2
CMP R0,R1
BNE _cfloop
/* drain write buffer */
MCR CpMMU, 0, R0, C(CpCacheFlush), C(0xa), 4
RET
/* write back a single cache line */
TEXT cachewbaddr(SB), $-4
BIC $31,R0
.
52c
/* drain write buffer and invalidate i&d cache contents */
.
50c
BNE _cfloop
.
47,48c
_cfloop:
MOVW.P 32(R0),R2
.
41a
/* flush tlb's */
TEXT mmuinvalidateaddr(SB), $-4
MCR CpMMU, 0, R0, C(CpTLBFlush), C(0x6), 1
RET
.
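cachewbregion rounds its start address down to a line boundary and cleans each 32-byte line covering the buffer, which is what a driver wants just before handing memory to a bus master. A usage sketch (the DMA routine and its names are illustrative):

typedef unsigned long ulong;

extern void cachewbregion(ulong, int);	/* l.s: clean lines covering [addr, addr+len) */

void
dmastart_sketch(void *buf, int len)
{
	cachewbregion((ulong)buf, len);	/* dirty lines reach DRAM first */
	/* ... then give the device the buffer's physical address ... */
}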
## diffname bitsy/l.s 2000/1028
## diff -e /n/emeliedump/2000/1018/sys/src/9/bitsy/l.s /n/emeliedump/2000/1028/sys/src/9/bitsy/l.s
291a
MOVW $0xdeaddead,R11 /* marker */
.
## diffname bitsy/l.s 2000/1101
## diff -e /n/emeliedump/2000/1028/sys/src/9/bitsy/l.s /n/emeliedump/2000/1101/sys/src/9/bitsy/l.s
383a
RET
TEXT spldone(SB), $0
.
374a
/* save caller pc in Mach */
MOVW $(MACHADDR+0x04),R2
MOVW R14,0(R2)
/* reset interrupt level */
.
362a
/* save caller pc in Mach */
MOVW $(MACHADDR+0x04),R2
MOVW R14,0(R2)
/* turn off interrupts */
.
153c
BIC $(CpCmmuena|CpCdcache|CpCicache|CpCwb|CpCvivec), R0
.
147c
ORR $(CpCmmuena|CpCdcache|CpCicache|CpCwb), R0
.
## diffname bitsy/l.s 2000/1102
## diff -e /n/emeliedump/2000/1101/sys/src/9/bitsy/l.s /n/emeliedump/2000/1102/sys/src/9/bitsy/l.s
68c
/* write back d cache */
.
## diffname bitsy/l.s 2000/1106
## diff -e /n/emeliedump/2000/1102/sys/src/9/bitsy/l.s /n/emeliedump/2000/1106/sys/src/9/bitsy/l.s
410,419d
98c
BGT _cwbrloop
.
93,94c
BIC $31,R0
_cwbrloop:
.
91c
CMP.S $(4*1024),R1
BGT _cachewb
.
75,76c
CMP.S R0,R1
BNE _cwbloop
.
70a
_cachewb:
.
54c
CMP.S R0,R1
.
## diffname bitsy/l.s 2000/1130
## diff -e /n/emeliedump/2000/1106/sys/src/9/bitsy/l.s /n/emeliedump/2000/1130/sys/src/9/bitsy/l.s
433a
/* The first MCR instruction of this function needs to be on a cache-line
* boundary; to make this happen, it will be copied (in trap.c).
*
* Doze puts the machine into idle mode. Any interrupt will get it out
* at the next instruction (the RET, to be precise).
*/
TEXT _doze(SB), $-4
MOVW $UCDRAMZERO, R1
MOVW R0,R0
MOVW R0,R0
MOVW R0,R0
MOVW R0,R0
MOVW R0,R0
MOVW R0,R0
MOVW R0,R0
MCR CpPWR, 0, R0, C(CpTest), C(0x2), 2
MOVW (R1), R0
MCR CpPWR, 0, R0, C(CpTest), C(0x8), 2
RET
.
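_doze must not take an instruction fetch between entering idle mode and the wakeup load, which is why its first MCR has to sit on a cache-line boundary and why trap.c copies the routine rather than calling it in place. A sketch of that arrangement; the buffer size, the copy length and the icache re-sync are assumptions, marked in the comments:

typedef unsigned long ulong;
typedef unsigned char uchar;

extern void _doze(void);	/* template in l.s */
extern void icacheinvalidate(void);	/* l.s, assumed still available */
extern void* memmove(void*, void*, ulong);

static void (*doze)(void);

void
dozeinit_sketch(void)
{
	static uchar buf[64+32];	/* room to align; 64 is an assumed copy length */
	ulong a;

	a = ((ulong)buf + 31) & ~31UL;	/* first 32-byte boundary in buf */
	memmove((void*)a, (void*)_doze, 64);
	icacheinvalidate();	/* the icache must see the fresh copy */
	doze = (void(*)(void))a;
}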
## diffname bitsy/l.s 2000/1205
## diff -e /n/emeliedump/2000/1130/sys/src/9/bitsy/l.s /n/emeliedump/2000/1205/sys/src/9/bitsy/l.s
225c
BL resettrap(SB)
.
## diffname bitsy/l.s 2000/1206
## diff -e /n/emeliedump/2000/1205/sys/src/9/bitsy/l.s /n/emeliedump/2000/1206/sys/src/9/bitsy/l.s
28c
MOVW $(MACHADDR+2*BY2PG), R13 /* stack */
.
## diffname bitsy/l.s 2000/1207
## diff -e /n/emeliedump/2000/1206/sys/src/9/bitsy/l.s /n/emeliedump/2000/1207/sys/src/9/bitsy/l.s
420a
CMP.S R0, R3
BEQ _tasout
EOR R3, R3
CMP.S R0, R3
BEQ _tasout
MOVW $1,R15
_tasout:
.
419a
MOVW R2, R3
.
401,402c
AND $(PsrDirq), R0
EOR $(PsrDirq), R0
.
376c
BIC $(PsrDirq), R0, R1
.
370c
ORR $(PsrDirq), R0, R1
.
59c
MCR CpMMU, 0, R0, C(CpCacheFlush), C(0x5), 0
.
57c
/* drain write buffer and invalidate i cache contents */
.
28c
MOVW $(MACHADDR+4*BY2PG), R13 /* stack */
.
## diffname bitsy/l.s 2000/1208
## diff -e /n/emeliedump/2000/1207/sys/src/9/bitsy/l.s /n/emeliedump/2000/1208/sys/src/9/bitsy/l.s
419,421c
MOVW $0xDEADDEAD, R0
MOVW R0, R3
SWPW R0, (R1)
.
## diffname bitsy/l.s 2000/1212
## diff -e /n/emeliedump/2000/1208/sys/src/9/bitsy/l.s /n/emeliedump/2000/1212/sys/src/9/bitsy/l.s
77c
BGE _cwbloop
.
65a
/* splx */
MOVW R3, CPSR
.
55c
BGE _cfloop
.
48a
/* splhi */
MOVW CPSR, R3
ORR $(PsrDirq), R3, R1
MOVW R1, CPSR
.
## diffname bitsy/l.s 2001/0324
## diff -e /n/emeliedump/2000/1212/sys/src/9/bitsy/l.s /n/emeliedump/2001/0324/sys/src/9/bitsy/l.s
142,146d
## diffname bitsy/l.s 2001/0618
## diff -e /n/emeliedump/2001/0324/sys/src/9/bitsy/l.s /n/emeliedump/2001/0618/sys/src/9/bitsy/l.s
445a
/* save the state machine in power_resume[] for an upcoming suspend
*/
TEXT setpowerlabel(SB), $-4
MOVW $power_resume+0(SB), R0
/* svc */ /* power_resume[]: what */
MOVW R1, 0(R0)
MOVW R2, 4(R0)
MOVW R3, 8(R0)
MOVW R4, 12(R0)
MOVW R5, 16(R0)
MOVW R6, 20(R0)
MOVW R7, 24(R0)
MOVW R8, 28(R0)
MOVW R9, 32(R0)
MOVW R10,36(R0)
MOVW R11,40(R0)
MOVW R12,44(R0)
MOVW R13,48(R0)
MOVW R14,52(R0)
MOVW SPSR, R1
MOVW R1, 56(R0)
MOVW CPSR, R2
MOVW R2, 60(R0)
/* copro */
MRC CpMMU, 0, R3, C(CpDAC), C(0x0)
MOVW R3, 144(R0)
MRC CpMMU, 0, R3, C(CpTTB), C(0x0)
MOVW R3, 148(R0)
MRC CpMMU, 0, R3, C(CpControl), C(0x0)
MOVW R3, 152(R0)
MRC CpMMU, 0, R3, C(CpFSR), C(0x0)
MOVW R3, 156(R0)
MRC CpMMU, 0, R3, C(CpFAR), C(0x0)
MOVW R3, 160(R0)
MRC CpMMU, 0, R3, C(CpPID), C(0x0)
MOVW R3, 164(R0)
/* irq */
BIC $(PsrMask), R2, R3
ORR $(PsrDirq|PsrMirq), R3
MOVW R3, CPSR
MOVW SPSR, R11
MOVW R11, 64(R0)
MOVW R12, 68(R0)
MOVW R13, 72(R0)
MOVW R14, 76(R0)
/* und */
BIC $(PsrMask), R2, R3
ORR $(PsrDirq|PsrMund), R3
MOVW R3, CPSR
MOVW SPSR, R11
MOVW R11, 80(R0)
MOVW R12, 84(R0)
MOVW R13, 88(R0)
MOVW R14, 92(R0)
/* abt */
BIC $(PsrMask), R2, R3
ORR $(PsrDirq|PsrMabt), R3
MOVW R3, CPSR
MOVW SPSR, R11
MOVW R11, 96(R0)
MOVW R12, 100(R0)
MOVW R13, 104(R0)
MOVW R14, 108(R0)
/* fiq */
BIC $(PsrMask), R2, R3
ORR $(PsrDirq|PsrMfiq), R3
MOVW R3, CPSR
MOVW SPSR, R7
MOVW R7, 112(R0)
MOVW R8, 116(R0)
MOVW R9, 120(R0)
MOVW R10,124(R0)
MOVW R11,128(R0)
MOVW R12,132(R0)
MOVW R13,136(R0)
MOVW R14,140(R0)
/* done */
MOVW R2, CPSR
MOVW R1, SPSR
MOVW $0, R0
RET
/* Entered after a resume from suspend state.
* The bootldr jumps here after a processor reset.
*/
TEXT sa1100_power_resume(SB), $-4
MOVW $setR12(SB), R12 /* load the SB */
/* SVC mode, interrupts disabled */
MOVW $(PsrDirq|PsrDfiq|PsrMsvc), R1
MOVW R1, CPSR
/* flush caches */
MCR CpMMU, 0, R0, C(CpCacheFlush), C(0x7), 0
/* drain prefetch */
MOVW R0,R0
MOVW R0,R0
MOVW R0,R0
MOVW R0,R0
/* drain write buffer */
MCR CpMMU, 0, R0, C(CpCacheFlush), C(0xa), 4
/* gotopowerlabel() */
/* svc */
MOVW $power_resume+0(SB), R0
MOVW 56(R0), R1 /* R1: SPSR, R2: CPSR */
MOVW 60(R0), R2
/* copro */
MOVW 148(R0), R3
MCR CpMMU, 0, R3, C(CpTTB), C(0x0)
MOVW 144(R0), R3
MCR CpMMU, 0, R3, C(CpDAC), C(0x0)
MOVW 152(R0), R3
MCR CpMMU, 0, R3, C(CpControl), C(0x0)
MOVW 156(R0), R3
MCR CpMMU, 0, R3, C(CpFSR), C(0x0)
MOVW 160(R0), R3
MCR CpMMU, 0, R3, C(CpFAR), C(0x0)
MOVW 164(R0), R3
MCR CpMMU, 0, R3, C(CpPID), C(0x0)
MCR CpMMU, 0, R0, C(CpTLBFlush), C(0x7)
/* irq */
BIC $(PsrMask), R2, R3
ORR $(PsrDirq|PsrMirq), R3
MOVW R3, CPSR
MOVW 64(R0), R11
MOVW 68(R0), R12
MOVW 72(R0), R13
MOVW 76(R0), R14
MOVW R11, SPSR
/* und */
BIC $(PsrMask), R2, R3
ORR $(PsrDirq|PsrMund), R3
MOVW R3, CPSR
MOVW 80(R0), R11
MOVW 84(R0), R12
MOVW 88(R0), R13
MOVW 92(R0), R14
MOVW R11, SPSR
/* abt */
BIC $(PsrMask), R2, R3
ORR $(PsrDirq|PsrMabt), R3
MOVW R3, CPSR
MOVW 96(R0), R11
MOVW 100(R0), R12
MOVW 104(R0), R13
MOVW 108(R0), R14
MOVW R11, SPSR
/* fiq */
BIC $(PsrMask), R2, R3
ORR $(PsrDirq|PsrMfiq), R3
MOVW R3, CPSR
MOVW 112(R0), R7
MOVW 116(R0), R8
MOVW 120(R0), R9
MOVW 124(R0), R10
MOVW 128(R0), R11
MOVW 132(R0), R12
MOVW 136(R0), R13
MOVW 140(R0), R14
MOVW R7, SPSR
/* svc */
MOVW 56(R0), R1
MOVW 60(R0), R2
MOVW R1, SPSR
MOVW R2, CPSR
MOVW 0(R0), R1
MOVW 4(R0), R2
MOVW 8(R0), R3
MOVW 12(R0),R4
MOVW 16(R0),R5
MOVW 20(R0),R6
MOVW 24(R0),R7
MOVW 28(R0),R8
MOVW 32(R0),R9
MOVW 36(R0),R10
MOVW 40(R0),R11
MOVW 44(R0),R12
MOVW 48(R0),R13
MOVW 52(R0),R14
RET
loop:
B loop
.
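setpowerlabel records every mode's banked registers plus the MMU coprocessor state in power_resume[] and returns 0; after wakeup the bootldr re-enters at sa1100_power_resume, which pours the same state back and RETs through the restored R14, so control emerges from setpowerlabel a second time, this time (as the code reads) with R0 still holding the nonzero save-area pointer. That gives suspend the familiar setjmp shape. A sketch under that reading; the sleep routine named here is illustrative:

typedef unsigned long ulong;

extern ulong setpowerlabel(void);	/* 0 on save, nonzero on resume (l.s) */
extern void powerdown(void);	/* illustrative: forces the sleep state */

void
suspend_sketch(void)
{
	if(setpowerlabel() == 0){
		/* first return: state captured, really go to sleep */
		powerdown();	/* does not return; wakeup resumes above */
	}
	/* second return: we came back through sa1100_power_resume */
}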
## diffname bitsy/l.s 2001/0619
## diff -e /n/emeliedump/2001/0618/sys/src/9/bitsy/l.s /n/emeliedump/2001/0619/sys/src/9/bitsy/l.s
627,628d
## diffname bitsy/l.s 2001/0620
## diff -e /n/emeliedump/2001/0619/sys/src/9/bitsy/l.s /n/emeliedump/2001/0620/sys/src/9/bitsy/l.s
530c
*/
.
## diffname bitsy/l.s 2001/0621
## diff -e /n/emeliedump/2001/0620/sys/src/9/bitsy/l.s /n/emeliedump/2001/0621/sys/src/9/bitsy/l.s
644c
MCR CpPWR, 0, R0, C(CpTest), C(0x8), 2
.
642c
MCR CpPWR, 0, R0, C(CpTest), C(0x2), 2
.
625c
B loop
.
592,593c
BIC $(PsrMask), R2, R3
ORR $(PsrDirq|PsrMfiq), R3
.
583,584c
BIC $(PsrMask), R2, R3
ORR $(PsrDirq|PsrMabt), R3
.
574,575c
BIC $(PsrMask), R2, R3
ORR $(PsrDirq|PsrMund), R3
.
565,566c
BIC $(PsrMask), R2, R3
ORR $(PsrDirq|PsrMirq), R3
.
561c
MOVW 164(R0), R3
.
559c
MOVW 160(R0), R3
.
557c
MOVW 156(R0), R3
.
555c
MOVW 152(R0), R3
.
553c
MOVW 144(R0), R3
.
551c
MOVW 148(R0), R3
.
537c
MCR CpMMU, 0, R0, C(CpCacheFlush), C(0x7), 0
.
523,525c
MOVW R2, CPSR
MOVW R1, SPSR
MOVW $0, R0
.
512,521c
MOVW R3, CPSR
MOVW SPSR, R7
MOVW R7, 112(R0)
MOVW R8, 116(R0)
MOVW R9, 120(R0)
MOVW R10,124(R0)
MOVW R11,128(R0)
MOVW R12,132(R0)
MOVW R13,136(R0)
MOVW R14,140(R0)
.
503,508c
MOVW R3, CPSR
MOVW SPSR, R11
MOVW R11, 96(R0)
MOVW R12, 100(R0)
MOVW R13, 104(R0)
MOVW R14, 108(R0)
.
494,499c
MOVW R3, CPSR
MOVW SPSR, R11
MOVW R11, 80(R0)
MOVW R12, 84(R0)
MOVW R13, 88(R0)
MOVW R14, 92(R0)
.
485,490c
MOVW R3, CPSR
MOVW SPSR, R11
MOVW R11, 64(R0)
MOVW R12, 68(R0)
MOVW R13, 72(R0)
MOVW R14, 76(R0)
.
481c
MOVW R3, 164(R0)
.
479c
MOVW R3, 160(R0)
.
477c
MOVW R3, 156(R0)
.
475c
MOVW R3, 152(R0)
.
473c
MOVW R3, 148(R0)
.
471c
MOVW R3, 144(R0)
.
451,468c
MOVW R1, 0(R0)
MOVW R2, 4(R0)
MOVW R3, 8(R0)
MOVW R4, 12(R0)
MOVW R5, 16(R0)
MOVW R6, 20(R0)
MOVW R7, 24(R0)
MOVW R8, 28(R0)
MOVW R9, 32(R0)
MOVW R10,36(R0)
MOVW R11,40(R0)
MOVW R12,44(R0)
MOVW R13,48(R0)
MOVW R14,52(R0)
MOVW SPSR, R1
MOVW R1, 56(R0)
MOVW CPSR, R2
MOVW R2, 60(R0)
.
415a
TEXT getsp(SB), $-4
MOVW R13, R0
RET
TEXT getlink(SB), $-4
MOVW R14, R0
RET
.
86c
.
81c
ADD $(8*1024),R0,R1
.
63,64c
MCR CpMMU, 0, R0, C(CpCacheFlush), C(0xa), 4
MCR CpMMU, 0, R0, C(CpCacheFlush), C(0x5), 0
.
56c
ADD $(8*1024),R0,R1
.
51c
ORR $(PsrDirq), R3, R1
.
## diffname bitsy/l.s 2001/0809
## diff -e /n/emeliedump/2001/0621/sys/src/9/bitsy/l.s /n/emeliedump/2001/0809/sys/src/9/bitsy/l.s
557a
MOVW R1, SPSR
MOVW R2, CPSR
.
554a
/* Debug
MOVW $0x25, R0
BL _PrChar(SB)
*/
.
453a
/* Debugging print routine */
TEXT _PrChar(SB), $-4
MOVW UART3REGS, R1
prloop:
MOVW 0x20(R1), R2
AND $0x4, R2
BEQ prloop
MOVB R0,0x14(R1)
RET
.
## diffname bitsy/l.s 2001/0811
## diff -e /n/emeliedump/2001/0809/sys/src/9/bitsy/l.s /n/emeliedump/2001/0811/sys/src/9/bitsy/l.s
565,568c
.
## diffname bitsy/l.s 2001/0812
## diff -e /n/emeliedump/2001/0811/sys/src/9/bitsy/l.s /n/emeliedump/2001/0812/sys/src/9/bitsy/l.s
454,463d
## diffname bitsy/l.s 2001/0814
## diff -e /n/emeliedump/2001/0812/sys/src/9/bitsy/l.s /n/emeliedump/2001/0814/sys/src/9/bitsy/l.s
567c
MCR CpMMU, 0, R3, C(CpControl), C(0x0) /* Enable cache */
MOVW R0, R0
MOVW R0, R0
MOVW R0, R0
MOVW R0, R0
.
153a
MOVW R0, R0
MOVW R0, R0
MOVW R0, R0
MOVW R0, R0
.
136a
/* return mmu control register */
TEXT getcontrol(SB), $-4
SUB R0, R0
MRC CpMMU, 0, R0, C(CpControl), C(0x0)
RET
/* return mmu dac register */
TEXT getdac(SB), $-4
SUB R0, R0
MRC CpMMU, 0, R0, C(CpDAC), C(0x0)
RET
/* return mmu ttb register */
TEXT getttb(SB), $-4
SUB R0, R0
MRC CpMMU, 0, R0, C(CpTTB), C(0x0)
RET
.
## diffname bitsy/l.s 2001/0815
## diff -e /n/emeliedump/2001/0814/sys/src/9/bitsy/l.s /n/emeliedump/2001/0815/sys/src/9/bitsy/l.s
600c
MOVW 152(R0), R3
MCR CpMMU, 0, R3, C(CpControl), C(0x0) /* Enable cache */
MOVW R0,R0
MOVW R0,R0
MOVW R0,R0
MOVW R0,R0
/* flush i&d caches */
MCR CpMMU, 0, R0, C(CpCacheFlush), C(0x7), 0
/* drain prefetch */
MOVW R0,R0
MOVW R0,R0
MOVW R0,R0
MOVW R0,R0
/* flush tlb */
MCR CpMMU, 0, R0, C(CpTLBFlush), C(0x7), 0
MOVW R0,R0
MOVW R0,R0
MOVW R0,R0
MOVW R0,R0
.
588,593c
MOVW 148(R0), R3
MCR CpMMU, 0, R3, C(CpTTB), C(0x0)
.
584,585c
/* flush caches */
MCR CpMMU, 0, R0, C(CpCacheFlush), C(0x7), 0
/* drain prefetch */
MOVW R0,R0
MOVW R0,R0
MOVW R0,R0
MOVW R0,R0
/* drain write buffer */
MCR CpMMU, 0, R0, C(CpCacheFlush), C(0xa), 4
MCR CpMMU, 0, R0, C(CpTLBFlush), C(0x7)
.
566,574d
## diffname bitsy/l.s 2001/0816
## diff -e /n/emeliedump/2001/0815/sys/src/9/bitsy/l.s /n/emeliedump/2001/0816/sys/src/9/bitsy/l.s
609a
/* drain prefetch */
.
603,607d
## diffname bitsy/l.s 2001/0817
## diff -e /n/emeliedump/2001/0816/sys/src/9/bitsy/l.s /n/emeliedump/2001/0817/sys/src/9/bitsy/l.s
672a
TEXT sa1100_power_down(SB), $-4
/* disable clock switching */
MCR CpPWR, 0, R0, C(CpTest), C(0x2), 2
/* save address of sleep_param in r4 */
mov r0, r4
@ Adjust memory timing before lowering CPU clock
/* Clock speed adjustment without changing memory timing makes */
/* CPU hang in some cases */
ldr r0, =MDREFR
ldr r1, [r0]
orr r1, r1, #MDREFR_K1DB2
str r1, [r0]
/* delay 90us and set CPU PLL to lowest speed */
/* fixes resume problem on high speed SA1110 */
mov r0, #90
bl SYMBOL_NAME(udelay)
ldr r0, =PPCR
mov r1, #0
str r1, [r0]
mov r0, #90
bl SYMBOL_NAME(udelay)
/* set up register contents for jump to page containing SA1110 SDRAM controller bug fix suspend code
*
* r0 points to MSC0 register
* r1 points to MSC1 register
* r2 points to MSC2 register
* r3 is MSC0 value
* r4 is MSC1 value
* r5 is MSC2 value
* r6 points to MDREFR register
* r7 is first MDREFR value
* r8 is second MDREFR value
* r9 is pointer to MDCNFG register
* r10 is MDCNFG value
* r11 is third MDREFR value
* r12 is pointer to PMCR register
* r13 is PMCR value (1)
*
*/
ldr r0, =MSC0
ldr r1, =MSC1
ldr r2, =MSC2
ldr r3, [r0]
bic r3, r3, #FMsk(MSC_RT)
bic r3, r3, #FMsk(MSC_RT)<<16
ldr r4, [r1]
bic r4, r4, #FMsk(MSC_RT)
bic r4, r4, #FMsk(MSC_RT)<<16
ldr r5, [r2]
bic r5, r5, #FMsk(MSC_RT)
bic r5, r5, #FMsk(MSC_RT)<<16
ldr r6, =MDREFR
ldr r7, [r6]
bic r7, r7, #0x0000FF00
bic r7, r7, #0x000000F0
orr r8, r7, #MDREFR_SLFRSH
ldr r9, =MDCNFG
ldr r10, [r9]
bic r10, r10, #(MDCNFG_DE0+MDCNFG_DE1)
bic r10, r10, #(MDCNFG_DE2+MDCNFG_DE3)
bic r11, r8, #MDREFR_SLFRSH
bic r11, r11, #MDREFR_E1PIN
ldr r12, =PMCR
mov r13, #PMCR_SF
b sa1110_sdram_controller_fix
.align 5
sa1110_sdram_controller_fix:
/* Step 1 clear RT field of all MSCx registers */
str r3, [r0]
str r4, [r1]
str r5, [r2]
/* Step 2 clear DRI field in MDREFR */
str r7, [r6]
/* Step 3 set SLFRSH bit in MDREFR */
str r8, [r6]
/* Step 4 clear DE bits in MDCNFG */
str r10, [r9]
/* Step 5 clear DRAM refresh control register */
str r11, [r6]
/* Wow, now the hardware suspend request pins can be used, that makes them functional for */
/* about 7 ns out of the entire time that the CPU is running! */
/* Step 6 set force sleep bit in PMCR */
str r13, [r12]
20:
b 20b /* loop waiting for sleep */
.
## diffname bitsy/l.s 2001/0818
## diff -e /n/emeliedump/2001/0817/sys/src/9/bitsy/l.s /n/emeliedump/2001/0818/sys/src/9/bitsy/l.s
781,784c
MOVW R13,(R12)
slloop:
B slloop /* loop waiting for sleep */
.
774c
MOVW R11,(R6)
.
771c
MOVW R10,(R9)
.
768c
MOVW R8,(R6)
.
765c
MOVW R7,(R6)
.
760,762c
MOVW R3,(R0)
MOVW R4,(R1)
MOVW R5,(R2)
.
756,758d
754c
/* .align 5 Needs to be on a cache line boundary? */
TEXT sdram_controller_fix(SB),$0
.
752c
MOVW R0,R0
MOVW R0,R0
MOVW R0,R0
MOVW R0,R0
MOVW R0,R0
MOVW R0,R0
MOVW R0,R0
/* Fall through */
.
750c
MOVW $(POWERREGS+0x0),R12
MOVW $1,R13 /* sleep force bit */
.
747,748c
BIC $(1<<31 | 1<<20),R8,R11 /* self-refresh and e1pin */
.
742,745c
MOVW $(MEMCONFREGS+0x0),R9
MOVW (R9),R10
BIC $(0x00030003),R10 /* clear DE0 ⋯ DE3 */
.
737,740c
MOVW (R6),R7
BIC $0x0000FFF0,R7 /* DRI 0 .. 11 */
ORR $(1<<31),R7,R8 /* prepare to set self refresh */
.
735c
MOVW $(MEMCONFREGS+0x1c),R6
.
731,733c
MOVW (R2),R5
BIC $0x00030003,R5 /* MSC_RT fields */
.
727,729c
MOVW (R1),R4
BIC $0x00030003,R4 /* MSC_RT fields */
.
723,725c
MOVW (R0),R3
BIC $0x00030003,R3 /* MSC_RT fields */
.
719,721c
MOVW $(MEMCONFREGS+0x10),R0
MOVW $(MEMCONFREGS+0x14),R1
MOVW $(MEMCONFREGS+0x2c),R2
.
699a
MOVW $(90*206),R0
d2:
SUB $1,R0
BNE d2
.
698a
MOVW $(POWERREGS+0x14),R0
MOVW $0,R1
MOVW R1,(R0) /* clear PPCR */
.
691,697c
MOVW $(90*206),R0
d1:
SUB $1,R0
BNE d1
.
678,687c
/* Adjust memory timing before lowering CPU clock
* Clock speed adjustment without changing memory
* timing makes CPU hang in some cases
*/
MOVW $(MEMCONFREGS+0x1c),R0
MOVW (R0),R1 /* mdrefr */
ORR $(1<<22), R1 /* set K1DB2 */
MOVW R1,(R0)
.
673c
TEXT power_down(SB), $-4
.
## diffname bitsy/l.s 2001/0820
## diff -e /n/emeliedump/2001/0818/sys/src/9/bitsy/l.s /n/emeliedump/2001/0820/sys/src/9/bitsy/l.s
788a
TEXT coma(SB), $-4
MOVW $1,R1
MOVW $(MEMCONFREGS+0x1c),R2
MOVW $(POWERREGS+0x0),R3
// MOVW R0,(R2)
MOVW R1,(R3)
comaloop:
B comaloop
.
727,786d
723,725c
MOVW $1,R2
MOVW R4,0x1c(R3)
// MOVW R6,0x1c(R3)
// MOVW R12,(R3)
// MOVW R11,0x1c(R3)
MOVW R2,0(R5)
.
704,721c
MOVW 0x10(R3),R2
AND $0xfffcfffc,R2
MOVW R2,0x10(R3)
MOVW 0x14(R3),R2
AND $0xfffcfffc,R2
MOVW R2,0x14(R3)
MOVW 0x2c(R3),R2
AND $0xfffcfffc,R2
MOVW R2,0x2c(R3)
.
702a
MOVW memconfregs+0(SB),R3
MOVW (R3),R12
AND $(~0x30003),R12
.
699,701c
l14: SUB $1,R0
BGT l14
MOVW powerregs+0(SB),R5
MOVW refr-4(SP),R4
AND $(~0xfff0),R4
ORR $0x80000000,R4,R6
AND $(~0x80100000),R6,R11
.
690,697c
l13: SUB $1,R0
BGT l13
MOVW powerregs+0(SB),R3
MOVW $0,R2
MOVW R2,20(R3)
.
675,688c
TEXT sa1100_power_off<>+0(SB),$8
MOVW resetregs+0(SB),R7
MOVW gpioregs+0(SB),R6
MOVW memconfregs+0(SB),R5
MOVW powerregs+0(SB),R3
MOVW 0x1c(R5),R1
ORR $0x30400000,R1
MOVW R1,refr-4(SP)
MOVW $0x80000003,R2
MOVW R2,0xc(R3)
MOVW $15,R2
MOVW R2,0x4(R7)
MOVW $7,R2
MOVW R2,0x10(R3)
MOVW $0,R2
MOVW R2,0x18(R3)
MOVW $sa1100_power_resume+0(SB),R2
MOVW R2,0x8(R3)
MOVW $0,R2
MOVW R2,0x4(R6)
MOVW 0x1c(R5),R2
ORR $0x400000,R2
MOVW R2,0x1c(R5)
.
## diffname bitsy/l.s 2001/0821
## diff -e /n/emeliedump/2001/0820/sys/src/9/bitsy/l.s /n/emeliedump/2001/0821/sys/src/9/bitsy/l.s
736,744d
732c
MOVW R2,0x0(R5)
.
730c
MOVW R12,0x0(R3)
.
725a
MOVW R0,R0 /* filler */
.
724c
AND $(~0x00030003),R2
.
721c
AND $(~0x00030003),R2
.
718c
AND $(~0x00030003),R2
.
714c
MOVW 0x0(R3),R12
.
708,709d
691c
MOVW $power_resume+0(SB),R2
.
680,682c
MOVW 0x1c(R5),R4
ORR $0x30400000,R4
AND $(~0xfff0),R4
.
569c
MOVW $power_state+0(SB), R0
.
561c
TEXT power_resume(SB), $-4
.
479,480c
MOVW $power_state+0(SB), R0
/* svc */ /* power_state[]: what */
.
476c
/* save the state machine in power_state[] for an upcoming suspend
.
## diffname bitsy/l.s 2001/0822
## diff -e /n/emeliedump/2001/0821/sys/src/9/bitsy/l.s /n/emeliedump/2001/0822/sys/src/9/bitsy/l.s
691c
/* MOVW $power_resume+0(SB),R2 */
MOVW $0,R2
.
## diffname bitsy/l.s 2002/0424
## diff -e /n/emeliedump/2001/0822/sys/src/9/bitsy/l.s /n/emeliedump/2002/0424/sys/src/9/bitsy/l.s
731c
MOVW R11,0x1c(R3)
.
729c
MOVW R6,0x1c(R3)
.
691,692c
MOVW $power_resume+0(SB),R2
/* MOVW $0,R2 */
.
## diffname bitsy/l.s 2002/0430
## diff -e /n/emeliedump/2002/0424/sys/src/9/bitsy/l.s /n/emeliedump/2002/0430/sys/src/9/bitsy/l.s
733a
/* The first instruction of this function needs to be on a cache-line
* boundary; to make this happen, it will be copied (in trap.c).
*
* Hibernate puts the machine into hibernation.
*/
TEXT _hibernate(SB), $-4
MOVW $1,R2
MOVW R4,0x1c(R3)
MOVW R6,0x1c(R3)
MOVW R12,0x0(R3)
MOVW R11,0x1c(R3)
MOVW R2,0x0(R5)
.
732a
xlloop:
B xlloop /* loop waiting for sleep */
.
726a
MOVW R0,R0
MOVW R0,R0
MOVW R0,R0
MOVW R0,R0
MOVW R0,R0
MOVW R0,R0
MOVW R0,R0
MOVW R0,R0
MOVW R0,R0
/* align this on a cache-line boundary */
.
725d
## diffname bitsy/l.s 2002/0518
## diff -e /n/emeliedump/2002/0430/sys/src/9/bitsy/l.s /n/emeliedump/2002/0518/sys/src/9/bitsy/l.s
752,756c
/* Follow the procedure */
/* 1 */
MOVW R1, 0x10(R5)
MOVW R7, 0x14(R5)
MOVW R8, 0x2c(R5)
/* 2 */
MOVW R4, 0x1c(R5)
/* 3 */
MOVW R6, 0x1c(R5)
/* 4 */
MOVW R9, 0x0(R5)
/* 5 */
MOVW R10, 0x1c(R5)
/* 6 */
MOVW R2, 0x0(R3)
.
745,750c
/* set PLL to lower speed w/ delay (ppcr = 0)*/
MOVW powerregs+0(SB),R3
MOVW $(120*206),R0
l11: SUB $1,R0
BGT l11
MOVW $0, R2
MOVW R2, 0x14(R3)
MOVW $(120*206),R0
l12: SUB $1,R0
BGT l12
/* set up registers for the suspend procedure:
* 1. clear RT in mscx (R1, R7, R8)
* 2. clear DRI in mdrefr (R4)
* 3. set slfrsh in mdrefr (R6)
* 4. clear DE in mdcnfg (R9)
* 5. clear dram refresh (R10)
* 6. force sleep (R2)
*/
/* 1 */
MOVW 0x10(R5), R2
BIC $(MSC_rt), R2
MOVW R2, R1
MOVW 0x14(R5), R2
BIC $(MSC_rt), R2
MOVW R2, R7
MOVW 0x2c(R5), R2
BIC $(MSC_rt), R2
MOVW R2, R8
/* 2 */
MOVW 0x1c(R5), R2
BIC $(0xff00), R2
BIC $(0x00f0), R2
MOVW R2, R4
/* 3 */
ORR $(MDREFR_slfrsh), R2, R6
/* 4 */
MOVW 0x0(R5), R9
BIC $(MDCFNG_de), R9, R9
/* 5 */
MOVW R4, R2
BIC $(MDREFR_slfrsh), R2, R2
BIC $(MDREFR_e1pin), R2, R2
MOVW R2, R10
/* 6 */
.
726,743c
/* adjust mem timing */
MOVW memconfregs+0(SB),R5
MOVW 0x1c(R5), R2
ORR $(MDREFR_k1db2), R2
MOVW R2, 0x1c(R5)
.
716,724c
/* disable clock switching */
MCR CpPWR, 0, R1, C(CpTest), C(0x2), 2
.
712,714c
BL cacheflush(SB)
.
694,710d
691,692c
/* set resume address (pspr)*/
MOVW $resumeaddr+0(SB),R1
MOVW 0x0(R1), R2
.
688a
/* sleep state */
.
685,687c
/* clear reset status */
MOVW $(RCSR_all), R2
MOVW R2, 0x4(R7)
/* float */
MOVW $(PCFR_opde|PCFR_fp|PCFR_fs), R2
.
680,683c
/* wakeup on power | rtc */
MOVW $(PWR_rtc|PWR_gpio0),R2
.
639c
ORR $(0xd1), R3
.
630c
ORR $(0xd7), R3
.
621c
ORR $(0xdb), R3
.
612c
ORR $(0xd2), R3
.
609a
/* usr */
BIC $(PsrMask), R2, R3
ORR $(0xdf), R3
MOVW 168(R0), R11
MOVW 172(R0), R12
MOVW 176(R0), R13
MOVW 180(R0), R14
MOVW R11, SPSR
.
541c
ORR $(0xd1), R3
.
532c
ORR $(0xd7), R3
.
523c
ORR $(0xdb), R3
.
514c
ORR $(0xd2), R3
.
511a
/* usr */
BIC $(PsrMask), R2, R3
ORR $(0xdf), R3
MOVW R3, CPSR
MOVW SPSR, R11
MOVW R11, 168(R0)
MOVW R12, 172(R0)
MOVW R13, 176(R0)
MOVW R14, 180(R0)
.
## diffname bitsy/l.s 2002/0604
## diff -e /n/emeliedump/2002/0518/sys/src/9/bitsy/l.s /n/emeliedump/2002/0604/sys/src/9/bitsy/l.s
811d
807a
MOVW R0,R0
MOVW R0,R0
MOVW R0,R0
MOVW R0,R0
MOVW R0,R0
MOVW R0,R0
MOVW R0,R0
MOVW R0,R0
MOVW R0,R0
MOVW R0,R0
MOVW R0,R0
MOVW R0,R0
MOVW R0,R0
MOVW R0,R0
MOVW R0,R0
MOVW R0,R0
MOVW R0,R0
RET
TEXT doze_code(SB), $-4
.
799c
TEXT doze(SB), $-4
.
794c
* boundary; to make this happen, it will be copied to the first cache-line
* boundary 8 words from the start of doze.
.
775c
TEXT power_magic(SB), $-4
/* power_code gets copied into the area of no-ops below,
* at a cache-line boundary (8 instructions)
*/
MOVW R0, R0
MOVW R0, R0
MOVW R0, R0
MOVW R0, R0
MOVW R0, R0
MOVW R0, R0
MOVW R0, R0
MOVW R0, R0
MOVW R0, R0
MOVW R0, R0
MOVW R0, R0
MOVW R0, R0
MOVW R0, R0
MOVW R0, R0
MOVW R0, R0
MOVW R0, R0
TEXT power_code(SB), $-4
/* Follow the procedure; this code gets copied to the no-op
* area preceding this code
*/
.
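The final arrangement splits the suspend tail in two: power_magic is a pad of no-ops with a cache-line boundary inside it, and power_code, the six-step store sequence, gets copied onto that boundary so it executes entirely from icache lines already fetched while the SDRAM controller is being shut down. A sketch of the copy; the copy length, the alignment and the caller are assumptions about the C side:

typedef unsigned long ulong;

extern void power_magic(void);	/* no-op pad in l.s */
extern void power_code(void);	/* steps 1-6 in l.s */
extern void icacheinvalidate(void);	/* assumed still available */
extern void* memmove(void*, void*, ulong);

void
powercopy_sketch(void)
{
	ulong dst;

	/* first 32-byte boundary inside power_magic's pad */
	dst = ((ulong)power_magic + 31) & ~31UL;
	memmove((void*)dst, (void*)power_code, 16*4);	/* 16 words: assumed length */
	icacheinvalidate();	/* fetched copy must match what we wrote */
	/* the suspend path then jumps to dst with R1-R13 loaded as
	 * the step comments in power_code describe */
}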
|