Source-Changes-HG archive

[Date Prev][Date Next][Thread Prev][Thread Next][Date Index][Thread Index][Old Index]

[src/trunk]: src/sys/arch/arm/arm Add xscale cpufuncs (using the sa110 ones w...



details:   https://anonhg.NetBSD.org/src/rev/9d495bad55f2
branches:  trunk
changeset: 514267:9d495bad55f2
user:      matt <matt%NetBSD.org@localhost>
date:      Mon Aug 27 00:00:27 2001 +0000

description:
Add xscale cpufuncs (using the sa110 ones when they are identical).

diffstat:

 sys/arch/arm/arm/cpufunc_asm.S |  291 +++++++++++++++++++++++++++++++++++++++-
 1 files changed, 277 insertions(+), 14 deletions(-)

diffs (truncated from 429 to 300 lines):

diff -r c4d372284a64 -r 9d495bad55f2 sys/arch/arm/arm/cpufunc_asm.S
--- a/sys/arch/arm/arm/cpufunc_asm.S    Sun Aug 26 21:17:52 2001 +0000
+++ b/sys/arch/arm/arm/cpufunc_asm.S    Mon Aug 27 00:00:27 2001 +0000
@@ -1,6 +1,7 @@
-/*     $NetBSD: cpufunc_asm.S,v 1.4 2001/06/05 09:19:33 bjh21 Exp $    */
+/*     $NetBSD: cpufunc_asm.S,v 1.5 2001/08/27 00:00:27 matt Exp $     */
 
 /*
+ * xscale support code Copyright (c) 2001 Matt Thomas
  * arm7tdmi support code Copyright (c) 2001 John Fremlin
  * arm8 support code Copyright (c) 1997 ARM Limited
  * arm8 support code Copyright (c) 1997 Causality Limited
@@ -238,10 +239,12 @@
 #endif /* CPU_ARM8 */
 
 
-#ifdef CPU_SA110
+#if defined(CPU_SA110) || defined(CPU_XSCALE)
 Lblock_userspace_access:
        .word   _C_LABEL(block_userspace_access)
+#endif
 
+#if defined(CPU_SA110)
 ENTRY(sa110_setttb)
        /* We need to flush the cache as it uses virtual addresses that are about to change */
 #ifdef CACHE_CLEAN_BLOCK_INTR
@@ -257,17 +260,17 @@
        stmfd   sp!, {r0-r3, lr}
        bl      _C_LABEL(sa110_cache_cleanID)
        ldmfd   sp!, {r0-r3, lr}
-       mcr     15, 0, r0, c7, c5, 0
-       mcr     15, 0, r0, c7, c10, 4
+       mcr     15, 0, r0, c7, c5, 0    /* invalidate icache & BTB */
+       mcr     15, 0, r0, c7, c10, 4   /* drain write (& fill) buffer */
 
        /* Write the TTB */
-       mcr     15, 0, r0, c2, c0, 0
+       mcr     15, 0, r0, c2, c0, 0    /* set translation table base */
 
        /* If we have updated the TTB we must flush the TLB */
-        mcr     15, 0, r0, c8, c7, 0
+        mcr     15, 0, r0, c8, c7, 0   /* invalidate I&D TLB */
 
        /* The cleanID above means we only need to flush the I cache here */
-        mcr     15, 0, r0, c7, c5, 0
+        mcr     15, 0, r0, c7, c5, 0   /* invalidate icache & BTB */
 
        /* Make sure that pipeline is emptied */
         mov     r0, r0
@@ -280,6 +283,45 @@
        mov     pc, lr
 #endif /* CPU_SA110 */
 
+#if defined(CPU_XSCALE)
+ENTRY(xscale_setttb)
+       /* We need to flush the cache as it uses virtual addresses that are about to change */
+#ifdef CACHE_CLEAN_BLOCK_INTR
+       mrs     r3, cpsr_all
+       orr     r1, r3, #(I32_bit | F32_bit)
+       msr     cpsr_all , r1
+#else
+       ldr     r3, Lblock_userspace_access
+       ldr     r2, [r3]
+       orr     r1, r2, #1
+       str     r1, [r3]
+#endif
+       stmfd   sp!, {r0-r3, lr}
+       bl      _C_LABEL(xscale_cache_cleanID)
+       ldmfd   sp!, {r0-r3, lr}
+       mcr     15, 0, r0, c7, c5, 0    /* invalidate icache & BTB */
+       mcr     15, 0, r0, c7, c10, 4   /* drain write (& fill) buffer */
+
+       /* Write the TTB */
+       mcr     15, 0, r0, c2, c0, 0    /* set translation table base */
+
+       /* If we have updated the TTB we must flush the TLB */
+        mcr     15, 0, r0, c8, c7, 0   /* invalidate I&D TLB */
+
+       /* The cleanID above means we only need to flush the I cache here */
+        mcr     15, 0, r0, c7, c5, 0   /* invalidate icache & BTB */
+
+       /* Make sure that pipeline is emptied */
+        mov     r0, r0
+        mov     r0, r0
+#ifdef CACHE_CLEAN_BLOCK_INTR
+       msr     cpsr_all, r3
+#else
+       str     r2, [r3]
+#endif
+       mov     pc, lr
+#endif /* CPU_XSCALE */
+
 /*
  * TLB functions
  */
@@ -314,28 +356,41 @@
        mov     pc, lr
 #endif /* CPU_ARM8 */
 
-#ifdef CPU_SA110
+#if defined(CPU_SA110) || defined(CPU_XSCALE)
+ENTRY_NP(xscale_tlb_flushID)
 ENTRY(sa110_tlb_flushID)
        mcr     15, 0, r0, c8, c7, 0            /* flush I+D tlb */
        mov     pc, lr
 
+#if defined(CPU_SA110)
 ENTRY(sa110_tlb_flushID_SE)
        mcr     15, 0, r0, c8, c6, 1            /* flush D tlb single entry */
        mcr     15, 0, r0, c8, c5, 0            /* flush I tlb */
        mov     pc, lr
+#endif /* CPU_SA110 */
 
+#if defined(CPU_XSCALE)
+ENTRY(xscale_tlb_flushID_SE)
+       mcr     15, 0, r0, c8, c6, 1            /* flush D tlb single entry */
+       mcr     15, 0, r0, c8, c5, 1            /* flush I tlb single entry */
+       mov     pc, lr
+#endif /* CPU_XSCALE */
+
+ENTRY_NP(xscale_tlb_flushI)
 ENTRY(sa110_tlb_flushI)
        mcr     15, 0, r0, c8, c5, 0            /* flush I tlb */
        mov     pc, lr
 
+ENTRY_NP(xscale_tlb_flushD)
 ENTRY(sa110_tlb_flushD)
        mcr     15, 0, r0, c8, c6, 0            /* flush D tlb */
        mov     pc, lr
 
+ENTRY_NP(xscale_tlb_flushD_SE)
 ENTRY(sa110_tlb_flushD_SE)
        mcr     15, 0, r0, c8, c6, 1            /* flush D tlb single entry */
        mov     pc, lr
-#endif /* CPU_SA110 */
+#endif /* CPU_SA110 || CPU_XSCALE */
 
 /*
  * Cache functions
@@ -518,27 +573,40 @@
        mov     pc, lr
 #endif /* CPU_ARM8 */
 
-#ifdef CPU_SA110
+#if defined(CPU_SA110) || defined(CPU_XSCALE)
+ENTRY_NP(xscale_cache_flushID)
 ENTRY(sa110_cache_flushID)
        mcr     15, 0, r0, c7, c7, 0            /* flush I+D cache */
        mov     pc, lr
 
+ENTRY_NP(xscale_cache_flushI)
 ENTRY(sa110_cache_flushI)
        mcr     15, 0, r0, c7, c5, 0            /* flush I cache */
        mov     pc, lr
 
+ENTRY_NP(xscale_cache_flushD)
 ENTRY(sa110_cache_flushD)
        mcr     15, 0, r0, c7, c6, 0            /* flush D cache */
        mov     pc, lr
 
+#if defined(CPU_XSCALE)
+ENTRY(xscale_cache_flushI_SE)
+       mcr     15, 0, r0, c7, c5, 1            /* flush I cache single entry */
+       mov     pc, lr
+#endif
+
+ENTRY_NP(xscale_cache_flushD_SE)
 ENTRY(sa110_cache_flushD_SE)
        mcr     15, 0, r0, c7, c6, 1            /* flush D cache single entry */
        mov     pc, lr
 
+ENTRY_NP(xscale_cache_cleanD_E)
 ENTRY(sa110_cache_cleanD_E)
        mcr     15, 0, r0, c7, c10, 1           /* clean D cache entry */
        mov     pc, lr
+#endif /* CPU_SA110 || CPU_XSCALE */
 
+#ifdef CPU_SA110
 /*
  * Information for SA110 cache clean/purge functions
  *
@@ -588,7 +656,6 @@
 #else
        str     ip, [r3]
 #endif
-
        mov     pc, lr  
 
 ENTRY(sa110_cache_purgeID)
@@ -650,25 +717,108 @@
 #endif
        mov     pc, lr  
 
+#endif /* CPU_SA110 */
+
+#ifdef CPU_XSCALE
+/*
+ * Information for XScale cache clean/purge functions
+ *
+ * The address of the blocks of memory to use
+ * The size of the block of memory to use
+ */
+
+       .data
+       .global _C_LABEL(xscale_cache_clean_addr)
+_C_LABEL(xscale_cache_clean_addr):
+       .word   0xf0000000
+       .global _C_LABEL(xscale_cache_clean_size)
+_C_LABEL(xscale_cache_clean_size):
+       .word   0x00008000
+
+       .text
+Lxscale_cache_clean_addr:
+       .word   _C_LABEL(xscale_cache_clean_addr)
+Lxscale_cache_clean_size:
+       .word   _C_LABEL(xscale_cache_clean_size)
+
+ENTRY_NP(xscale_cache_syncI)
+ENTRY_NP(xscale_cache_purgeID)
+       mcr     15, 0, r0, c7, c5, 0    /* flush I cache (D cleaned below) */
+ENTRY_NP(xscale_cache_cleanID)
+ENTRY_NP(xscale_cache_purgeD)
+ENTRY(xscale_cache_cleanD)
+#ifdef CACHE_CLEAN_BLOCK_INTR
+       mrs     r3, cpsr_all
+       orr     r0, r3, #(I32_bit | F32_bit)
+       msr     cpsr_all , r0
+#else
+       ldr     r3, Lblock_userspace_access
+       ldr     ip, [r3]
+       orr     r0, ip, #1
+       str     r0, [r3]
+#endif
+       ldr     r2, Lxscale_cache_clean_addr
+       ldmia   r2, {r0, r1}
+       add     r0, r0, r1
+
+Lxscale_cache_cleanD_loop:
+       subs    r0, r0, #32
+       mcr     15, 0, r0, c7, c2, 5            /* allocate cache line */
+       subs    r1, r1, #32
+       bne     Lxscale_cache_cleanD_loop
+
+/*
+ * Clean mini-data-cache
+ */
+       mov     r1, #64
+Lxscale_cache_cleanD_loop2:
+       ldr     r3, [r0], #32
+       subs    r1, r1, #1
+       bne     Lxscale_cache_cleanD_loop2
+
+       mcr     15, 0, r0, c7, c10, 4           /* drain write buffer */
+
+#ifdef CACHE_CLEAN_BLOCK_INTR
+       msr     cpsr_all , r3
+#else
+       str     ip, [r3]
+#endif
+       mov     pc, lr  
+
+#endif /* CPU_XSCALE */
+
+#if defined(CPU_SA110)
 ENTRY(sa110_cache_purgeID_E)
        mcr     15, 0, r0, c7, c10, 1           /* clean dcache entry */
        mcr     15, 0, r0, c7, c10, 4           /* drain write buffer */
        mcr     15, 0, r0, c7, c5, 0            /* flush I cache */
        mcr     15, 0, r0, c7, c6, 1            /* flush D cache single entry */
        mov     pc, lr
+#endif /* CPU_SA110 */
 
+#if defined(CPU_XSCALE)
+ENTRY(xscale_cache_purgeID_E)
+       mcr     15, 0, r0, c7, c10, 1           /* clean dcache entry */
+       mcr     15, 0, r0, c7, c10, 4           /* drain write buffer */
+       mcr     15, 0, r0, c7, c5, 1            /* flush I cache single entry */
+       mcr     15, 0, r0, c7, c6, 1            /* flush D cache single entry */
+       mov     pc, lr
+#endif /* CPU_XSCALE */
+
+#if defined(CPU_SA110) || defined(CPU_XSCALE)
+ENTRY_NP(xscale_cache_purgeD_E)
 ENTRY(sa110_cache_purgeD_E)
        mcr     15, 0, r0, c7, c10, 1           /* clean dcache entry */
        mcr     15, 0, r0, c7, c10, 4           /* drain write buffer */
        mcr     15, 0, r0, c7, c6, 1            /* flush D cache single entry */
        mov     pc, lr
-#endif /* CPU_SA110 */
+#endif /* CPU_SA110 || CPU_XSCALE */
 
 /*
  * Other functions
  */
 
-#ifdef CPU_SA110
+#if defined(CPU_SA110) || defined(CPU_XSCALE)
 ENTRY(sa110_drain_writebuf)
        mcr     15, 0, r0, c7, c10, 4           /* drain write buffer */
        mov     pc, lr



Home | Main Index | Thread Index | Old Index