Module Name:	src
Committed By:	matt
Date:		Sun Mar 30 01:15:03 UTC 2014

Modified Files:
	src/sys/arch/arm/arm: cpufunc_asm_arm10.S cpufunc_asm_arm11.S
	    cpufunc_asm_arm11x6.S cpufunc_asm_arm3.S cpufunc_asm_arm67.S
	    cpufunc_asm_arm7tdmi.S cpufunc_asm_arm8.S cpufunc_asm_arm9.S
	    cpufunc_asm_armv4.S cpufunc_asm_armv5.S cpufunc_asm_armv5_ec.S
	    cpufunc_asm_fa526.S cpufunc_asm_ixp12x0.S cpufunc_asm_pj4b.S
	    cpufunc_asm_sa1.S cpufunc_asm_sa11x0.S cpufunc_asm_sheeva.S
	    cpufunc_asm_xscale.S

Log Message:
Allow for 8KB page size.
Add ARM_MMU_EXTENDED support.
Add missing END()


To generate a diff of this commit:
cvs rdiff -u -r1.10 -r1.11 src/sys/arch/arm/arm/cpufunc_asm_arm10.S \
    src/sys/arch/arm/arm/cpufunc_asm_arm9.S
cvs rdiff -u -r1.11 -r1.12 src/sys/arch/arm/arm/cpufunc_asm_arm11.S
cvs rdiff -u -r1.3 -r1.4 src/sys/arch/arm/arm/cpufunc_asm_arm11x6.S \
    src/sys/arch/arm/arm/cpufunc_asm_arm3.S \
    src/sys/arch/arm/arm/cpufunc_asm_ixp12x0.S \
    src/sys/arch/arm/arm/cpufunc_asm_pj4b.S
cvs rdiff -u -r1.6 -r1.7 src/sys/arch/arm/arm/cpufunc_asm_arm67.S \
    src/sys/arch/arm/arm/cpufunc_asm_arm7tdmi.S \
    src/sys/arch/arm/arm/cpufunc_asm_armv5.S \
    src/sys/arch/arm/arm/cpufunc_asm_fa526.S
cvs rdiff -u -r1.9 -r1.10 src/sys/arch/arm/arm/cpufunc_asm_arm8.S
cvs rdiff -u -r1.4 -r1.5 src/sys/arch/arm/arm/cpufunc_asm_armv4.S \
    src/sys/arch/arm/arm/cpufunc_asm_sheeva.S
cvs rdiff -u -r1.5 -r1.6 src/sys/arch/arm/arm/cpufunc_asm_armv5_ec.S \
    src/sys/arch/arm/arm/cpufunc_asm_sa11x0.S
cvs rdiff -u -r1.14 -r1.15 src/sys/arch/arm/arm/cpufunc_asm_sa1.S
cvs rdiff -u -r1.22 -r1.23 src/sys/arch/arm/arm/cpufunc_asm_xscale.S

Please note that diffs are not public domain; they are subject to the
copyright notices on the relevant files.
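
A note on the recurring shape of these changes: with an 8KB kernel page
built from two 4KB L2 entries (PAGE_SIZE == 2 * L2_S_SIZE), a
single-entry TLB flush covers only half of the page, so each flush
routine repeats the operation at va + L2_S_SIZE. A minimal sketch of
that pattern, using the same macros as the diffs below; the name
example_tlb_flushID_SE is hypothetical, for illustration only:

ENTRY(example_tlb_flushID_SE)
	mcr	p15, 0, r0, c8, c7, 1	/* flush I+D tlb entry, first 4KB half */
#if PAGE_SIZE == 2 * L2_S_SIZE
	add	r0, r0, #L2_S_SIZE	/* advance to the second 4KB half */
	mcr	p15, 0, r0, c8, c7, 1	/* flush I+D tlb entry, second 4KB half */
#endif
	RET
END(example_tlb_flushID_SE)	/* the added END()s also give each symbol a size */
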
Modified files:

Index: src/sys/arch/arm/arm/cpufunc_asm_arm10.S
diff -u src/sys/arch/arm/arm/cpufunc_asm_arm10.S:1.10 src/sys/arch/arm/arm/cpufunc_asm_arm10.S:1.11
--- src/sys/arch/arm/arm/cpufunc_asm_arm10.S:1.10	Sun Aug 18 06:28:18 2013
+++ src/sys/arch/arm/arm/cpufunc_asm_arm10.S	Sun Mar 30 01:15:03 2014
@@ -1,4 +1,4 @@
-/*	$NetBSD: cpufunc_asm_arm10.S,v 1.10 2013/08/18 06:28:18 matt Exp $	*/
+/*	$NetBSD: cpufunc_asm_arm10.S,v 1.11 2014/03/30 01:15:03 matt Exp $	*/
 
 /*
  * Copyright (c) 2002 ARM Limited
@@ -33,6 +33,7 @@
 #include <machine/asm.h>
 #include <arm/locore.h>
+#include "assym.h"
 
 /*
  * TLB functions
  */
@@ -40,12 +41,23 @@
 ENTRY(arm10_tlb_flushID_SE)
 	mcr	p15, 0, r0, c8, c6, 1	/* flush D tlb single entry */
 	mcr	p15, 0, r0, c8, c5, 1	/* flush I tlb single entry */
+#if PAGE_SIZE == 2 * L2_S_SIZE
+	add	r0, r0, #L2_S_SIZE
+	mcr	p15, 0, r0, c8, c6, 1	/* flush D tlb single entry */
+	mcr	p15, 0, r0, c8, c5, 1	/* flush I tlb single entry */
+#endif
 	RET
+END(arm10_tlb_flushID_SE)
 
 ENTRY(arm10_tlb_flushI_SE)
 	mcr	p15, 0, r0, c8, c5, 1	/* flush I tlb single entry */
+#if PAGE_SIZE == 2 * L2_S_SIZE
+	add	r0, r0, #L2_S_SIZE
+	mcr	p15, 0, r0, c8, c5, 1	/* flush I tlb single entry */
+#endif
 	RET
-
+END(arm10_tlb_flushI_SE)
+
 /*
  * Context switch.
@@ -67,3 +79,4 @@ ENTRY(arm10_context_switch)
 	nop
 	nop
 	RET
+END(arm10_context_switch)

Index: src/sys/arch/arm/arm/cpufunc_asm_arm9.S
diff -u src/sys/arch/arm/arm/cpufunc_asm_arm9.S:1.10 src/sys/arch/arm/arm/cpufunc_asm_arm9.S:1.11
--- src/sys/arch/arm/arm/cpufunc_asm_arm9.S:1.10	Sun Aug 18 06:28:18 2013
+++ src/sys/arch/arm/arm/cpufunc_asm_arm9.S	Sun Mar 30 01:15:03 2014
@@ -1,4 +1,4 @@
-/*	$NetBSD: cpufunc_asm_arm9.S,v 1.10 2013/08/18 06:28:18 matt Exp $	*/
+/*	$NetBSD: cpufunc_asm_arm9.S,v 1.11 2014/03/30 01:15:03 matt Exp $	*/
 
 /*
  * Copyright (c) 2001, 2004 ARM Limited
@@ -31,11 +31,10 @@
  * ARM9 assembly functions for CPU / MMU / TLB specific operations
  */
 
+#include "assym.h"
 #include <machine/asm.h>
 #include <arm/locore.h>
 
-#include "assym.h"
-
 /*
  * Functions to set the MMU Translation Table Base register
  *
@@ -55,6 +54,7 @@ ENTRY(arm9_setttb)
 	mcrne	p15, 0, r0, c8, c7, 0	/* invalidate I+D TLBs */
 	RET
+END(arm9_setttb)
 
 /*
  * TLB functions
  */
@@ -62,7 +62,13 @@ ENTRY(arm9_setttb)
 ENTRY(arm9_tlb_flushID_SE)
 	mcr	p15, 0, r0, c8, c6, 1	/* flush D tlb single entry */
 	mcr	p15, 0, r0, c8, c5, 1	/* flush I tlb single entry */
+#if PAGE_SIZE == 2 * L2_S_SIZE
+	add	r0, r0, #L2_S_SIZE
+	mcr	p15, 0, r0, c8, c6, 1	/* flush D tlb single entry */
+	mcr	p15, 0, r0, c8, c5, 1	/* flush I tlb single entry */
+#endif
 	mov	pc, lr
+END(arm9_tlb_flushID_SE)
 
 /*
  * Cache operations.  For the entire cache we use the set/index
@@ -89,6 +95,7 @@ ENTRY_NP(arm9_icache_sync_range)
 	subs	r1, r1, ip
 	bpl	.Larm9_sync_next
 	mov	pc, lr
+END(arm9_icache_sync_range)
 
 ENTRY_NP(arm9_icache_sync_all)
.Larm9_icache_sync_all:
@@ -117,6 +124,7 @@ ENTRY_NP(arm9_icache_sync_all)
 
 .Larm9_line_size:
 	.word	_C_LABEL(arm_pcache) + DCACHE_LINE_SIZE
+END(arm9_icache_sync_all)
 
 ENTRY(arm9_dcache_wb_range)
 	ldr	ip, .Larm9_line_size
@@ -133,7 +141,8 @@ ENTRY(arm9_dcache_wb_range)
 	subs	r1, r1, ip
 	bpl	.Larm9_wb_next
 	mov	pc, lr
-
+END(arm9_dcache_wb_range)
+
 ENTRY(arm9_dcache_wbinv_range)
 	ldr	ip, .Larm9_line_size
 	cmp	r1, #0x4000
@@ -149,7 +158,8 @@ ENTRY(arm9_dcache_wbinv_range)
 	subs	r1, r1, ip
 	bpl	.Larm9_wbinv_next
 	mov	pc, lr
-
+END(arm9_dcache_wbinv_range)
+
 /*
  * Note, we must not invalidate everything.  If the range is too big we
  * must use wb-inv of the entire cache.
@@ -169,6 +179,7 @@ ENTRY(arm9_dcache_inv_range)
 	subs	r1, r1, ip
 	bpl	.Larm9_inv_next
 	mov	pc, lr
+END(arm9_dcache_inv_range)
 
 ENTRY(arm9_idcache_wbinv_range)
 	ldr	ip, .Larm9_line_size
@@ -186,6 +197,7 @@ ENTRY(arm9_idcache_wbinv_range)
 	subs	r1, r1, ip
 	bpl	.Larm9_id_wbinv_next
 	mov	pc, lr
+END(arm9_idcache_wbinv_range)
 
 ENTRY_NP(arm9_idcache_wbinv_all)
.Larm9_idcache_wbinv_all:
@@ -215,6 +227,8 @@ ENTRY(arm9_dcache_wbinv_all)
 
 .Larm9_cache_data:
 	.word	_C_LABEL(arm9_dcache_sets_max)
+END(arm9_dcache_wbinv_all)
+END(arm9_idcache_wbinv_all)
 
 /*
  * Context switch.
@@ -236,6 +250,7 @@ ENTRY(arm9_context_switch)
 	nop
 	nop
 	mov	pc, lr
+END(arm9_context_switch)
 
 	.bss

Index: src/sys/arch/arm/arm/cpufunc_asm_arm11.S
diff -u src/sys/arch/arm/arm/cpufunc_asm_arm11.S:1.11 src/sys/arch/arm/arm/cpufunc_asm_arm11.S:1.12
--- src/sys/arch/arm/arm/cpufunc_asm_arm11.S:1.11	Thu Feb 20 17:27:46 2014
+++ src/sys/arch/arm/arm/cpufunc_asm_arm11.S	Sun Mar 30 01:15:03 2014
@@ -1,4 +1,4 @@
-/*	$NetBSD: cpufunc_asm_arm11.S,v 1.11 2014/02/20 17:27:46 matt Exp $	*/
+/*	$NetBSD: cpufunc_asm_arm11.S,v 1.12 2014/03/30 01:15:03 matt Exp $	*/
 
 /*
  * Copyright (c) 2002, 2005 ARM Limited
@@ -49,15 +49,17 @@ ENTRY(arm11_setttb)
 #error arm11 does not have a VIVT cache.
 #endif
 
-	mcr	p15, 0, r0, c2, c0, 0	/* load new TTB */
-
 	cmp	r1, #0
+	mcr	p15, 0, r0, c2, c0, 0	/* TTBR0 set */
+#ifdef ARM_MMU_EXTENDED
+	mcreq	p15, 0, r0, c2, c0, 1	/* TTBR1 set */
+#else
 	mcrne	p15, 0, r0, c8, c7, 0	/* invalidate I+D TLBs */
 	mcrne	p15, 0, r0, c7, c10, 4	/* drain write buffer */
+#endif
 	RET
 END(arm11_setttb)
 
-
 /*
  * Context switch.
 *
@@ -69,9 +71,16 @@ ENTRY(arm11_context_switch)
 	 * We can assume that the caches will only contain kernel addresses
	 * at this point.  So no need to flush them again.
 	 */
+#ifdef ARM_MMU_EXTENDED
+	cmp	r1, #0
+#endif
 	mcr	p15, 0, r0, c7, c10, 4	/* drain the write buffer */
-	mcr	p15, 0, r0, c2, c0, 0	/* set the new TTB */
+	mcr	p15, 0, r0, c2, c0, 0	/* TTBR0 set */
+#ifdef ARM_MMU_EXTENDED
+	mcreq	p15, 0, r0, c2, c0, 1	/* TTBR1 set is asid 0 */
+#else
 	mcr	p15, 0, r0, c8, c7, 0	/* and flush the I+D tlbs */
+#endif
 
 	/* Paranoia -- make sure the pipeline is empty. */
 	nop
@@ -96,6 +105,11 @@ ENTRY(arm11_tlb_flushI_SE)
 	orr	r0, r0, r1		/* insert ASID into MVA */
 #endif
 	mcr	p15, 0, r0, c8, c5, 1	/* flush I tlb single entry */
+#if PAGE_SIZE == 2 * L2_S_SIZE
+	add	r0, r0, #L2_S_SIZE
+	mcr	p15, 0, r0, c8, c5, 1	/* flush I tlb single entry */
+#endif
+
 	mov	r0, #0
 	mcr	p15, 0, r0, c7, c10, 4	/* drain write buffer */
 	RET
@@ -113,6 +127,10 @@ ENTRY(arm11_tlb_flushD_SE)
 	orr	r0, r0, r1		/* insert ASID into MVA */
 #endif
 	mcr	p15, 0, r0, c8, c6, 1	/* flush D tlb single entry */
+#if PAGE_SIZE == 2 * L2_S_SIZE
+	add	r0, r0, #L2_S_SIZE
+	mcr	p15, 0, r0, c8, c6, 1	/* flush D tlb single entry */
+#endif
 	mov	r0, #0
 	mcr	p15, 0, r0, c7, c10, 4	/* drain write buffer */
 	RET
@@ -130,6 +148,10 @@ ENTRY(arm11_tlb_flushID_SE)
 	orr	r0, r0, r1		/* insert ASID into MVA */
 #endif
 	mcr	p15, 0, r0, c8, c7, 1	/* flush I+D tlb single entry */
+#if PAGE_SIZE == 2 * L2_S_SIZE
+	add	r0, r0, #L2_S_SIZE
+	mcr	p15, 0, r0, c8, c7, 1	/* flush I+D tlb single entry */
+#endif
 	mov	r0, #0
 	mcr	p15, 0, r0, c7, c10, 4	/* drain write buffer */
 	RET

Index: src/sys/arch/arm/arm/cpufunc_asm_arm11x6.S
diff -u src/sys/arch/arm/arm/cpufunc_asm_arm11x6.S:1.3 src/sys/arch/arm/arm/cpufunc_asm_arm11x6.S:1.4
--- src/sys/arch/arm/arm/cpufunc_asm_arm11x6.S:1.3	Sun Aug 18 06:28:18 2013
+++ src/sys/arch/arm/arm/cpufunc_asm_arm11x6.S	Sun Mar 30 01:15:03 2014
@@ -1,4 +1,4 @@
-/*	$NetBSD: cpufunc_asm_arm11x6.S,v 1.3 2013/08/18 06:28:18 matt Exp $	*/
+/*	$NetBSD: cpufunc_asm_arm11x6.S,v 1.4 2014/03/30 01:15:03 matt Exp $	*/
 
 /*
  * Copyright (c) 2007 Microsoft
@@ -63,7 +63,7 @@
 #include <machine/asm.h>
 #include <arm/locore.h>
 
-RCSID("$NetBSD: cpufunc_asm_arm11x6.S,v 1.3 2013/08/18 06:28:18 matt Exp $")
+RCSID("$NetBSD: cpufunc_asm_arm11x6.S,v 1.4 2014/03/30 01:15:03 matt Exp $")
 
 #if 0
 #define Invalidate_I_cache(Rtmp1, Rtmp2) \
@@ -114,37 +114,27 @@ RCSID("$NetBSD: cpufunc_asm_arm11x6.S,v
 	mcr	p15, 0, reg, c7, c10, 4;/* Data Synchronization Barrier */
 #endif
 
-ENTRY(arm11x6_setttb)
-#ifdef PMAP_CACHE_VIVT
-	Flush_D_cache(r2)
-	Invalidate_I_cache(r2, r3)
-#else
-	mov	r2, #0
-#endif
-	mcr	p15, 0, r0, c2, c0, 0	/* load new TTB */
-
-	cmp	r1, #0
-	mcrne	p15, 0, r2, c8, c7, 0	/* invalidate I+D TLBs */
-	mcrne	p15, 0, r2, c7, c10, 4	/* drain write buffer */
-	RET
-
 ENTRY_NP(arm11x6_idcache_wbinv_all)
 	Flush_D_cache(r0)
 	Invalidate_I_cache(r0, r1)
 	RET
+END(arm11x6_idcache_wbinv_all)
 
 ENTRY_NP(arm11x6_dcache_wbinv_all)
 	Flush_D_cache(r0)
 	RET
+END(arm11x6_dcache_wbinv_all)
 
 ENTRY_NP(arm11x6_icache_sync_all)
 	Flush_D_cache(r0)
 	Invalidate_I_cache(r0, r1)
 	RET
+END(arm11x6_icache_sync_all)
 
 ENTRY_NP(arm11x6_flush_prefetchbuf)
 	mcr	p15, 0, r0, c7, c5, 4	/* Flush Prefetch Buffer */
 	RET
+END(arm11x6_flush_prefetchbuf)
 
 ENTRY_NP(arm11x6_icache_sync_range)
 	add	r1, r1, r0
@@ -171,6 +161,7 @@ ENTRY_NP(arm11x6_icache_sync_range)
 	mcrr	p15, 0, r1, r0, c12	/* clean and invalidate D cache range */ /* XXXNH */
 	mcr	p15, 0, r0, c7, c10, 4	/* drain the write buffer */
 	RET
+END(arm11x6_icache_sync_range)
 
 ENTRY_NP(arm11x6_idcache_wbinv_range)
 	add	r1, r1, r0
@@ -197,6 +188,7 @@ ENTRY_NP(arm11x6_idcache_wbinv_range)
 	mcrr	p15, 0, r1, r0, c14	/* clean and invalidate D cache range */
 	mcr	p15, 0, r0, c7, c10, 4	/* drain the write buffer */
 	RET
+END(arm11x6_idcache_wbinv_range)
 
 /*
  * Preload the cache before issuing the WFI by conditionally disabling the

Index: src/sys/arch/arm/arm/cpufunc_asm_arm3.S
diff -u src/sys/arch/arm/arm/cpufunc_asm_arm3.S:1.3 src/sys/arch/arm/arm/cpufunc_asm_arm3.S:1.4
--- src/sys/arch/arm/arm/cpufunc_asm_arm3.S:1.3	Sun Aug 18 06:28:18 2013
+++ src/sys/arch/arm/arm/cpufunc_asm_arm3.S	Sun Mar 30 01:15:03 2014
@@ -1,4 +1,4 @@
-/*	$NetBSD: cpufunc_asm_arm3.S,v 1.3 2013/08/18 06:28:18 matt Exp $	*/
+/*	$NetBSD: cpufunc_asm_arm3.S,v 1.4 2014/03/30 01:15:03 matt Exp $	*/
 
 /*
  * Copyright (c) 1997,1998 Mark Brinicombe.
@@ -35,6 +35,7 @@
  * ARM3 assembly functions for CPU / MMU / TLB specific operations
  */
 
+#include "assym.h"
 #include <machine/asm.h>
 #include <arm/locore.h>
 
Index: src/sys/arch/arm/arm/cpufunc_asm_ixp12x0.S
diff -u src/sys/arch/arm/arm/cpufunc_asm_ixp12x0.S:1.3 src/sys/arch/arm/arm/cpufunc_asm_ixp12x0.S:1.4
--- src/sys/arch/arm/arm/cpufunc_asm_ixp12x0.S:1.3	Sun Aug 18 06:28:18 2013
+++ src/sys/arch/arm/arm/cpufunc_asm_ixp12x0.S	Sun Mar 30 01:15:03 2014
@@ -1,4 +1,4 @@
-/*	$NetBSD: cpufunc_asm_ixp12x0.S,v 1.3 2013/08/18 06:28:18 matt Exp $	*/
+/*	$NetBSD: cpufunc_asm_ixp12x0.S,v 1.4 2014/03/30 01:15:03 matt Exp $	*/
 
 /*
  * Copyright (c) 2002 Wasabi Systems, Inc.
@@ -60,10 +60,12 @@ ENTRY(ixp12x0_context_switch)
 	mov	r0, r0
 	mov	r0, r0
 	mov	pc, lr
+END(ixp12x0_context_switch)
 
 ENTRY(ixp12x0_drain_readbuf)
 	mcr	p15, 0, r0, c9, c0, 0	/* drain read buffer */
 	mov	pc, lr
+END(ixp12x0_drain_readbuf)
 
 /*
  * Information for the IXP12X0 cache clean/purge functions:

Index: src/sys/arch/arm/arm/cpufunc_asm_pj4b.S
diff -u src/sys/arch/arm/arm/cpufunc_asm_pj4b.S:1.3 src/sys/arch/arm/arm/cpufunc_asm_pj4b.S:1.4
--- src/sys/arch/arm/arm/cpufunc_asm_pj4b.S:1.3	Sun Aug 18 06:28:18 2013
+++ src/sys/arch/arm/arm/cpufunc_asm_pj4b.S	Sun Mar 30 01:15:03 2014
@@ -1,4 +1,4 @@
-/*	$NetBSD: cpufunc_asm_pj4b.S,v 1.3 2013/08/18 06:28:18 matt Exp $	*/
+/*	$NetBSD: cpufunc_asm_pj4b.S,v 1.4 2014/03/30 01:15:03 matt Exp $	*/
 
 /*******************************************************************************
 Copyright (C) Marvell International Ltd. and its affiliates
@@ -58,11 +58,18 @@ ENTRY(pj4b_setttb)
 #else
 	bic	r2, r0, #0x18
 #endif
-	mcr	p15, 0, r0, c2, c0, 0	/* load new TTB */
+	mcr	p15, 0, r0, c2, c0, 0	/* load TTBR0 */
+#ifdef ARM_MMU_EXTENDED
+	cmp	r1, #0
+	mcreq	p15, 0, r0, c2, c0, 1	/* load TTBR1 */
+#else
 	mov	r0, #0
-	isb
 	mcr	p15, 0, r0, c8, c7, 0	/* invalidate I+D TLBs */
+#endif
+	isb
+	dsb
 	RET
+END(pj4b_setttb)
 
 ENTRY(pj4b_tlb_flushID)
 	mcr	p15, 0, r0, c8, c7, 0	/* flush I+D tlb */
@@ -72,6 +79,10 @@ END(pj4b_tlb_flushID)
 
 ENTRY(pj4b_tlb_flushID_SE)
 	mcr	p15, 0, r0, c8, c7, 1	@flush I+D tlb single entry
+#if PAGE_SIZE == 2 * L2_S_SIZE
+	add	r0, r0, L2_S_SIZE
+	mcr	p15, 0, r0, c8, c7, 1	@flush I+D tlb single entry
+#endif
 	dsb
 	RET
 END(pj4b_tlb_flushID_SE)
@@ -184,20 +195,21 @@ ENTRY(pj4b_flush_brnchtgt_va)
 	RET
 END(pj4b_flush_brnchtgt_va)
 
-ENTRY(get_core_id)
-	mrc	p15, 0, r0, c0, c0, 5
-	RET
-END(get_core_id)
-
 ENTRY(pj4b_context_switch)
 	dsb
 #if defined(L2CACHE_ENABLE) && defined(AURORA_L2_PT_WALK)
-	orr	r1, r0, #TTB_FLAGS_UP
+	orr	r2, r0, #TTB_FLAGS_UP
 #else
-	bic	r1, r0, #0x18
+	bic	r2, r0, #0x18
 #endif
-	mcr	p15, 0, r1, c2, c0, 0	@set the new TTB
+	mcr	p15, 0, r2, c2, c0, 0	@set the new TTBR0
+#ifdef ARM_MMU_EXTENDED
+	cmp	r1, #0
+	mcreq	p15, 0, r2, c2, c0, 1	@set the new TTBR1
+#else
+	mov	r0, #0
 	mcr	p15, 0, r0, c8, c7, 0	@flush the I+D
+#endif
 	dsb
 	isb
 	RET

Index: src/sys/arch/arm/arm/cpufunc_asm_arm67.S
diff -u src/sys/arch/arm/arm/cpufunc_asm_arm67.S:1.6 src/sys/arch/arm/arm/cpufunc_asm_arm67.S:1.7
--- src/sys/arch/arm/arm/cpufunc_asm_arm67.S:1.6	Sun Aug 18 06:28:18 2013
+++ src/sys/arch/arm/arm/cpufunc_asm_arm67.S	Sun Mar 30 01:15:03 2014
@@ -1,4 +1,4 @@
-/*	$NetBSD: cpufunc_asm_arm67.S,v 1.6 2013/08/18 06:28:18 matt Exp $	*/
+/*	$NetBSD: cpufunc_asm_arm67.S,v 1.7 2014/03/30 01:15:03 matt Exp $	*/
 
 /*
  * Copyright (c) 1997,1998 Mark Brinicombe.
@@ -35,6 +35,7 @@
  * ARM6/ARM7 assembly functions for CPU / MMU / TLB specific operations
  */
 
+#include "assym.h"
 #include <machine/asm.h>
 #include <arm/locore.h>
 
Index: src/sys/arch/arm/arm/cpufunc_asm_arm7tdmi.S
diff -u src/sys/arch/arm/arm/cpufunc_asm_arm7tdmi.S:1.6 src/sys/arch/arm/arm/cpufunc_asm_arm7tdmi.S:1.7
--- src/sys/arch/arm/arm/cpufunc_asm_arm7tdmi.S:1.6	Sun Aug 18 06:28:18 2013
+++ src/sys/arch/arm/arm/cpufunc_asm_arm7tdmi.S	Sun Mar 30 01:15:03 2014
@@ -1,4 +1,4 @@
-/*	$NetBSD: cpufunc_asm_arm7tdmi.S,v 1.6 2013/08/18 06:28:18 matt Exp $	*/
+/*	$NetBSD: cpufunc_asm_arm7tdmi.S,v 1.7 2014/03/30 01:15:03 matt Exp $	*/
 
 /*
  * Copyright (c) 2001 John Fremlin
@@ -34,6 +34,7 @@
  * ARM7TDMI assembly functions for CPU / MMU / TLB specific operations
  */
 
+#include "assym.h"
 #include <machine/asm.h>
 #include <arm/locore.h>
 
@@ -43,6 +44,13 @@
  * We need to clean and flush the cache as it uses virtual
  * addresses that are about to change.
  */
+
+/*
+ * Context switch.
+ *
+ * These are the CPU-specific parts of the context switcher cpu_switch()
+ * These functions actually perform the TTB reload.
+ */
 ENTRY(arm7tdmi_setttb)
 	mov	r3, lr		/* ditto with lr */
 	mov	r2, r1		/* store the flush flag in a safe place */
@@ -64,6 +72,8 @@ ENTRY(arm7tdmi_setttb)
 	bl	_C_LABEL(arm7tdmi_cache_flushID)
 	mov	pc, r3
+END(arm7tdmi_setttb)
+STRONG_ALIAS(arm7tdmi_context_switch, arm7tdmi_setttb)
 
 /*
  * TLB functions
  */
@@ -72,10 +82,16 @@ ENTRY(arm7tdmi_tlb_flushID)
 	mov	r0, #0
 	mcr	p15, 0, r0, c8, c7, 0
 	mov	pc, lr
+END(arm7tdmi_tlb_flushID)
 
 ENTRY(arm7tdmi_tlb_flushID_SE)
 	mcr	p15, 0, r0, c8, c7, 1
+#if PAGE_SIZE == 2 * L2_S_SIZE
+	add	r0, r0, #L2_S_SIZE
+	mcr	p15, 0, r0, c8, c7, 1
+#endif
 	mov	pc, lr
+END(arm7tdmi_tlb_flushID_SE)
 
 /*
  * Cache functions
@@ -90,12 +106,4 @@ ENTRY(arm7tdmi_cache_flushID)
 
 	mov	r0, r0
 	mov	pc, lr
-
-/*
- * Context switch.
- *
- * These are the CPU-specific parts of the context switcher cpu_switch()
- * These functions actually perform the TTB reload.
- */
-ENTRY(arm7tdmi_context_switch)
-	b	_C_LABEL(arm7tdmi_setttb)
+END(arm7tdmi_cache_flushID)

Index: src/sys/arch/arm/arm/cpufunc_asm_armv5.S
diff -u src/sys/arch/arm/arm/cpufunc_asm_armv5.S:1.6 src/sys/arch/arm/arm/cpufunc_asm_armv5.S:1.7
--- src/sys/arch/arm/arm/cpufunc_asm_armv5.S:1.6	Sun Aug 18 06:28:18 2013
+++ src/sys/arch/arm/arm/cpufunc_asm_armv5.S	Sun Mar 30 01:15:03 2014
@@ -1,4 +1,4 @@
-/*	$NetBSD: cpufunc_asm_armv5.S,v 1.6 2013/08/18 06:28:18 matt Exp $	*/
+/*	$NetBSD: cpufunc_asm_armv5.S,v 1.7 2014/03/30 01:15:03 matt Exp $	*/
 
 /*
  * Copyright (c) 2002, 2005 ARM Limited
@@ -33,11 +33,10 @@
  * operations.
  */
 
+#include "assym.h"
 #include <machine/asm.h>
 #include <arm/locore.h>
 
-#include "assym.h"
-
 /*
  * Functions to set the MMU Translation Table Base register
 *

Index: src/sys/arch/arm/arm/cpufunc_asm_fa526.S
diff -u src/sys/arch/arm/arm/cpufunc_asm_fa526.S:1.6 src/sys/arch/arm/arm/cpufunc_asm_fa526.S:1.7
--- src/sys/arch/arm/arm/cpufunc_asm_fa526.S:1.6	Sun Aug 18 06:28:18 2013
+++ src/sys/arch/arm/arm/cpufunc_asm_fa526.S	Sun Mar 30 01:15:03 2014
@@ -1,4 +1,4 @@
-/*	$NetBSD: cpufunc_asm_fa526.S,v 1.6 2013/08/18 06:28:18 matt Exp $	*/
+/*	$NetBSD: cpufunc_asm_fa526.S,v 1.7 2014/03/30 01:15:03 matt Exp $	*/
 /*-
  * Copyright (c) 2008 The NetBSD Foundation, Inc.
  * All rights reserved.
@@ -28,6 +28,7 @@
  * POSSIBILITY OF SUCH DAMAGE.
  */
 
+#include "assym.h"
 #include <machine/asm.h>
 #include <arm/locore.h>
 
@@ -51,30 +52,43 @@ ENTRY(fa526_setttb)
 	mov	r0, r0
 	mov	r0, r0
 	mov	pc, lr
+END(fa526_setttb)
 
 /*
  * TLB functions
  */
 ENTRY(fa526_tlb_flushID_SE)
 	mcr	p15, 0, r0, c8, c7, 1	/* flush Utlb single entry */
+#if PAGE_SIZE == 2 * L2_S_SIZE
+	add	r0, r0, #L2_S_SIZE
+	mcr	p15, 0, r0, c8, c7, 1	/* flush Utlb single entry */
+#endif
 	mov	pc, lr
+END(fa526_tlb_flushID_SE)
 
 /*
  * TLB functions
  */
 ENTRY(fa526_tlb_flushI_SE)
 	mcr	p15, 0, r0, c8, c5, 1	/* flush Itlb single entry */
+#if PAGE_SIZE == 2 * L2_S_SIZE
+	add	r0, r0, #L2_S_SIZE
+	mcr	p15, 0, r0, c8, c5, 1	/* flush Itlb single entry */
+#endif
 	mov	pc, lr
+END(fa526_tlb_flushI_SE)
 
 ENTRY(fa526_cpu_sleep)
 	mov	r0, #0
 	mcr	p15, 0, r0, c7, c5, 5	/* Enter sleep mode */
 	mov	pc, lr
+END(fa526_cpu_sleep)
 
 ENTRY(fa526_flush_prefetchbuf)
 	mov	r0, #0
 	mcr	p15, 0, r0, c7, c5, 4	/* Pre-fetch flush */
 	mov	pc, lr
+END(fa526_flush_prefetchbuf)
 
 /*
  * Cache functions
@@ -85,17 +99,20 @@ ENTRY(fa526_idcache_wbinv_all)
 	mcr	p15, 0, r0, c7, c5, 0	/* invalidate I$ */
 	mcr	p15, 0, r0, c7, c10, 4	/* drain write buffer */
 	mov	pc, lr
+END(fa526_idcache_wbinv_all)
 
 ENTRY(fa526_icache_sync_all)
 	mov	r0, #0
 	mcr	p15, 0, r0, c7, c5, 0	/* invalidate I$ */
 	mov	pc, lr
+END(fa526_icache_sync_all)
 
 ENTRY(fa526_dcache_wbinv_all)
 	mov	r0, #0
 	mcr	p15, 0, r0, c7, c14, 0	/* clean and invalidate D$ */
 	mcr	p15, 0, r0, c7, c10, 4	/* drain write buffer */
 	mov	pc, lr
+END(fa526_dcache_wbinv_all)
 
 /*
  * Soft functions
@@ -115,6 +132,7 @@ ENTRY(fa526_dcache_wbinv_range)
 
 	mcr	p15, 0, r0, c7, c10, 4	/* drain write buffer */
 	mov	pc, lr
+END(fa526_dcache_wbinv_range)
 
 ENTRY(fa526_dcache_wb_range)
 	cmp	r1, #0x4000
@@ -135,6 +153,7 @@ ENTRY(fa526_dcache_wb_range)
3:
 	mcr	p15, 0, r0, c7, c10, 4	/* drain write buffer */
 	mov	pc, lr
+END(fa526_dcache_wb_range)
 
 ENTRY(fa526_dcache_inv_range)
 	and	r2, r0, #(CACHELINE_SIZE-1)
@@ -147,6 +166,7 @@ ENTRY(fa526_dcache_inv_range)
 	bhi	1b
 
 	mov	pc, lr
+END(fa526_dcache_inv_range)
 
 ENTRY(fa526_idcache_wbinv_range)
 	cmp	r1, #0x4000
@@ -164,6 +184,7 @@ ENTRY(fa526_idcache_wbinv_range)
2:
 	mcr	p15, 0, r0, c7, c10, 4	/* drain write buffer */
 	mov	pc, lr
+END(fa526_idcache_wbinv_range)
 
 ENTRY(fa526_icache_sync_range)
 	cmp	r1, #0x4000
@@ -181,11 +202,13 @@ ENTRY(fa526_icache_sync_range)
2:
 	mcr	p15, 0, r0, c7, c10, 4	/* drain write buffer */
 	mov	pc, lr
+END(fa526_icache_sync_range)
 
 ENTRY(fa526_flush_brnchtgt_E)
 	mov	r0, #0
 	mcr	p15, 0, r0, c7, c5, 6	/* invalidate BTB cache */
 	mov	pc, lr
+END(fa526_flush_brnchtgt_E)
 
 ENTRY(fa526_context_switch)
 	/*
@@ -205,4 +228,4 @@ ENTRY(fa526_context_switch)
 	mov	r0, r0
 	mov	r0, r0
 	mov	pc, lr
-
+END(fa526_context_switch)

Index: src/sys/arch/arm/arm/cpufunc_asm_arm8.S
diff -u src/sys/arch/arm/arm/cpufunc_asm_arm8.S:1.9 src/sys/arch/arm/arm/cpufunc_asm_arm8.S:1.10
--- src/sys/arch/arm/arm/cpufunc_asm_arm8.S:1.9	Mon Dec  2 18:36:10 2013
+++ src/sys/arch/arm/arm/cpufunc_asm_arm8.S	Sun Mar 30 01:15:03 2014
@@ -1,4 +1,4 @@
-/*	$NetBSD: cpufunc_asm_arm8.S,v 1.9 2013/12/02 18:36:10 joerg Exp $	*/
+/*	$NetBSD: cpufunc_asm_arm8.S,v 1.10 2014/03/30 01:15:03 matt Exp $	*/
 
 /*
  * Copyright (c) 1997 ARM Limited
@@ -35,6 +35,7 @@
  * ARM8 assembly functions for CPU / MMU / TLB specific operations
  */
 
+#include "assym.h"
 #include <machine/asm.h>
 #include <arm/locore.h>
 
@@ -57,6 +58,7 @@ ENTRY(arm8_clock_config)
 	mcr	p15, 0, r2, c15, c0, 0	/* Write clock register */
 	mov	r0, r3			/* Return old value */
 	mov	pc, lr
+END(arm8_clock_config)
 
 /*
  * Functions to set the MMU Translation Table Base register
@@ -92,6 +94,7 @@ ENTRY(arm8_setttb)
 	msr	cpsr_all, r3
 	mov	pc, lr
+END(arm8_setttb)
 
 /*
  * TLB functions
  */
@@ -99,10 +102,16 @@ ENTRY(arm8_setttb)
 ENTRY(arm8_tlb_flushID)
 	mcr	p15, 0, r0, c8, c7, 0	/* flush I+D tlb */
 	mov	pc, lr
+END(arm8_tlb_flushID)
 
 ENTRY(arm8_tlb_flushID_SE)
 	mcr	p15, 0, r0, c8, c7, 1	/* flush I+D tlb single entry */
+#if PAGE_SIZE == 2 * L2_S_SIZE
+	add	r0, r0, #L2_S_SIZE
+	mcr	p15, 0, r0, c8, c7, 1	/* flush I+D tlb single entry */
+#endif
 	mov	pc, lr
+END(arm8_tlb_flushID_SE)
 
 /*
  * Cache functions
@@ -110,10 +119,12 @@ ENTRY(arm8_tlb_flushID_SE)
 ENTRY(arm8_cache_flushID)
 	mcr	p15, 0, r0, c7, c7, 0	/* flush I+D cache */
 	mov	pc, lr
+END(arm8_cache_flushID)
 
 ENTRY(arm8_cache_flushID_E)
 	mcr	p15, 0, r0, c7, c7, 1	/* flush I+D single entry */
 	mov	pc, lr
+END(arm8_cache_flushID_E)
 
 ENTRY(arm8_cache_cleanID)
 	mov	r0, #0x00000000
@@ -155,10 +166,12 @@ ENTRY(arm8_cache_cleanID)
 	bne	1b
 
 	mov	pc, lr
+END(arm8_cache_cleanID)
 
 ENTRY(arm8_cache_cleanID_E)
 	mcr	p15, 0, r0, c7, c11, 1	/* clean I+D single entry */
 	mov	pc, lr
+END(arm8_cache_cleanID_E)
 
 ENTRY(arm8_cache_purgeID)
 	/*
@@ -234,6 +247,7 @@ ENTRY(arm8_cache_purgeID)
 
 	msr	cpsr_all, r3
 	mov	pc, lr
+END(arm8_cache_purgeID)
 
 ENTRY(arm8_cache_purgeID_E)
 	/*
@@ -255,6 +269,7 @@ ENTRY(arm8_cache_purgeID_E)
 	mcr	p15, 0, r0, c7, c7, 1	/* flush I+D single entry */
 	msr	cpsr_all, r3
 	mov	pc, lr
+END(arm8_cache_purgeID_E)
 
 /*
  * Context switch.
@@ -281,3 +296,4 @@ ENTRY(arm8_context_switch)
 	mov	r0, r0
 	mov	r0, r0
 	mov	pc, lr
+END(arm8_context_switch)

Index: src/sys/arch/arm/arm/cpufunc_asm_armv4.S
diff -u src/sys/arch/arm/arm/cpufunc_asm_armv4.S:1.4 src/sys/arch/arm/arm/cpufunc_asm_armv4.S:1.5
--- src/sys/arch/arm/arm/cpufunc_asm_armv4.S:1.4	Tue Jan 28 16:45:25 2014
+++ src/sys/arch/arm/arm/cpufunc_asm_armv4.S	Sun Mar 30 01:15:03 2014
@@ -1,4 +1,4 @@
-/*	$NetBSD: cpufunc_asm_armv4.S,v 1.4 2014/01/28 16:45:25 martin Exp $	*/
+/*	$NetBSD: cpufunc_asm_armv4.S,v 1.5 2014/03/30 01:15:03 matt Exp $	*/
 
 /*
  * Copyright (c) 2001 ARM Limited
@@ -33,9 +33,10 @@
  * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
  * SUCH DAMAGE.
 *
- * ARM9 assembly functions for CPU / MMU / TLB specific operations
+ * ARMv4 assembly functions for CPU / MMU / TLB specific operations
  */
 
+#include "assym.h"
 #include <machine/asm.h>
 #include <arm/locore.h>
 
@@ -61,6 +62,10 @@ END(armv4_tlb_flushD)
 
 ENTRY(armv4_tlb_flushD_SE)
 	mcr	p15, 0, r0, c8, c6, 1	/* flush D tlb single entry */
+#if PAGE_SIZE == 2 * L2_S_SIZE
+	add	r0, r0, #L2_S_SIZE
+	mcr	p15, 0, r0, c8, c6, 1	/* flush D tlb single entry */
+#endif
 	mov	pc, lr
 END(armv4_tlb_flushD_SE)

Index: src/sys/arch/arm/arm/cpufunc_asm_sheeva.S
diff -u src/sys/arch/arm/arm/cpufunc_asm_sheeva.S:1.4 src/sys/arch/arm/arm/cpufunc_asm_sheeva.S:1.5
--- src/sys/arch/arm/arm/cpufunc_asm_sheeva.S:1.4	Sun Aug 18 06:28:18 2013
+++ src/sys/arch/arm/arm/cpufunc_asm_sheeva.S	Sun Mar 30 01:15:03 2014
@@ -29,11 +29,10 @@
  * SUCH DAMAGE.
  */
 
+#include "assym.h"
 #include <arm/asm.h>
 #include <arm/locore.h>
 
-#include "assym.h"
-
 .Lsheeva_cache_line_size:
 	.word	_C_LABEL(arm_pcache) + DCACHE_LINE_SIZE
 .Lsheeva_asm_page_mask:

Index: src/sys/arch/arm/arm/cpufunc_asm_armv5_ec.S
diff -u src/sys/arch/arm/arm/cpufunc_asm_armv5_ec.S:1.5 src/sys/arch/arm/arm/cpufunc_asm_armv5_ec.S:1.6
--- src/sys/arch/arm/arm/cpufunc_asm_armv5_ec.S:1.5	Wed Dec 25 22:04:28 2013
+++ src/sys/arch/arm/arm/cpufunc_asm_armv5_ec.S	Sun Mar 30 01:15:03 2014
@@ -1,4 +1,4 @@
-/*	$NetBSD: cpufunc_asm_armv5_ec.S,v 1.5 2013/12/25 22:04:28 joerg Exp $	*/
+/*	$NetBSD: cpufunc_asm_armv5_ec.S,v 1.6 2014/03/30 01:15:03 matt Exp $	*/
 
 /*
  * Copyright (c) 2002, 2005 ARM Limited
@@ -36,11 +36,10 @@
  * This source was derived from that file.
  */
 
+#include "assym.h"
 #include <machine/asm.h>
 #include <arm/locore.h>
 
-#include "assym.h"
-
 /*
  * Functions to set the MMU Translation Table Base register
 *

Index: src/sys/arch/arm/arm/cpufunc_asm_sa11x0.S
diff -u src/sys/arch/arm/arm/cpufunc_asm_sa11x0.S:1.5 src/sys/arch/arm/arm/cpufunc_asm_sa11x0.S:1.6
--- src/sys/arch/arm/arm/cpufunc_asm_sa11x0.S:1.5	Mon Dec  2 18:36:10 2013
+++ src/sys/arch/arm/arm/cpufunc_asm_sa11x0.S	Sun Mar 30 01:15:03 2014
@@ -1,4 +1,4 @@
-/*	$NetBSD: cpufunc_asm_sa11x0.S,v 1.5 2013/12/02 18:36:10 joerg Exp $	*/
+/*	$NetBSD: cpufunc_asm_sa11x0.S,v 1.6 2014/03/30 01:15:03 matt Exp $	*/
 
 /*
  * Copyright (c) 2002 Wasabi Systems, Inc.
@@ -35,6 +35,7 @@
  * POSSIBILITY OF SUCH DAMAGE.
  */
 
+#include "assym.h"
 #include <arm/asm.h>
 #include <arm/locore.h>
 
Index: src/sys/arch/arm/arm/cpufunc_asm_sa1.S
diff -u src/sys/arch/arm/arm/cpufunc_asm_sa1.S:1.14 src/sys/arch/arm/arm/cpufunc_asm_sa1.S:1.15
--- src/sys/arch/arm/arm/cpufunc_asm_sa1.S:1.14	Mon Dec  2 18:36:10 2013
+++ src/sys/arch/arm/arm/cpufunc_asm_sa1.S	Sun Mar 30 01:15:03 2014
@@ -1,4 +1,4 @@
-/*	$NetBSD: cpufunc_asm_sa1.S,v 1.14 2013/12/02 18:36:10 joerg Exp $	*/
+/*	$NetBSD: cpufunc_asm_sa1.S,v 1.15 2014/03/30 01:15:03 matt Exp $	*/
 
 /*
  * Copyright (c) 1997,1998 Mark Brinicombe.
@@ -35,6 +35,7 @@
  * SA-1 assembly functions for CPU / MMU / TLB specific operations
  */
 
+#include "assym.h"
 #include <arm/asm.h>
 #include <arm/locore.h>
 
@@ -85,6 +86,7 @@ ENTRY(sa1_setttb)
 	str	r2, [r3]
 #endif
 	RET
+END(sa1_setttb)
 
 /*
  * TLB functions
@@ -92,7 +94,13 @@ ENTRY(sa1_setttb)
 ENTRY(sa1_tlb_flushID_SE)
 	mcr	p15, 0, r0, c8, c6, 1	/* flush D tlb single entry */
 	mcr	p15, 0, r0, c8, c5, 0	/* flush I tlb */
+#if PAGE_SIZE == 2 * L2_S_SIZE
+	add	r0, r0, #L2_S_SIZE
+	mcr	p15, 0, r0, c8, c6, 1	/* flush D tlb single entry */
+	mcr	p15, 0, r0, c8, c5, 0	/* flush I tlb */
+#endif
 	mov	pc, lr
+END(sa1_tlb_flushID_SE)
 
 /*
  * Cache functions
@@ -100,22 +108,27 @@ ENTRY(sa1_tlb_flushID_SE)
 ENTRY(sa1_cache_flushID)
 	mcr	p15, 0, r0, c7, c7, 0	/* flush I+D cache */
 	mov	pc, lr
+END(sa1_cache_flushID)
 
 ENTRY(sa1_cache_flushI)
 	mcr	p15, 0, r0, c7, c5, 0	/* flush I cache */
 	mov	pc, lr
+END(sa1_cache_flushI)
 
 ENTRY(sa1_cache_flushD)
 	mcr	p15, 0, r0, c7, c6, 0	/* flush D cache */
 	mov	pc, lr
+END(sa1_cache_flushD)
 
 ENTRY(sa1_cache_flushD_SE)
 	mcr	p15, 0, r0, c7, c6, 1	/* flush D cache single entry */
 	mov	pc, lr
+END(sa1_cache_flushD_SE)
 
 ENTRY(sa1_cache_cleanD_E)
 	mcr	p15, 0, r0, c7, c10, 1	/* clean D cache entry */
 	mov	pc, lr
+END(sa1_cache_cleanD_E)
 
 /*
  * Information for the SA-1 cache clean/purge functions:
@@ -196,6 +209,7 @@ ENTRY(sa1_cache_cleanD)
 	SA1_CACHE_CLEAN_EPILOGUE
 
 	mov	pc, lr
+END(sa1_cache_cleanD)
 
 ENTRY(sa1_cache_purgeID_E)
 	mcr	p15, 0, r0, c7, c10, 1	/* clean dcache entry */
@@ -203,12 +217,14 @@ ENTRY(sa1_cache_purgeID_E)
 	mcr	p15, 0, r0, c7, c5, 0	/* flush I cache */
 	mcr	p15, 0, r0, c7, c6, 1	/* flush D cache single entry */
 	mov	pc, lr
+END(sa1_cache_purgeID_E)
 
 ENTRY(sa1_cache_purgeD_E)
 	mcr	p15, 0, r0, c7, c10, 1	/* clean dcache entry */
 	mcr	p15, 0, r0, c7, c10, 4	/* drain write buffer */
 	mcr	p15, 0, r0, c7, c6, 1	/* flush D cache single entry */
 	mov	pc, lr
+END(sa1_cache_purgeD_E)
 
 /*
  * Soft functions
@@ -231,6 +247,8 @@ ENTRY(sa1_cache_cleanD_rng)
 
 	mcr	p15, 0, r0, c7, c10, 4	/* drain write buffer */
 	mov	pc, lr
+END(sa1_cache_cleanD_rng)
+END(sa1_cache_cleanID_rng)
 
 ENTRY(sa1_cache_purgeID_rng)
 	cmp	r1, #0x4000
@@ -249,6 +267,7 @@ ENTRY(sa1_cache_purgeID_rng)
 	mcr	p15, 0, r0, c7, c10, 4	/* drain write buffer */
 	mcr	p15, 0, r0, c7, c5, 0	/* flush I cache */
 	mov	pc, lr
+END(sa1_cache_purgeID_rng)
 
 ENTRY(sa1_cache_purgeD_rng)
 	cmp	r1, #0x4000
@@ -266,6 +285,7 @@ ENTRY(sa1_cache_purgeD_rng)
 
 	mcr	p15, 0, r0, c7, c10, 4	/* drain write buffer */
 	mov	pc, lr
+END(sa1_cache_purgeD_rng)
 
 ENTRY(sa1_cache_syncI_rng)
 	cmp	r1, #0x4000
@@ -284,6 +304,7 @@ ENTRY(sa1_cache_syncI_rng)
 
 	mcr	p15, 0, r0, c7, c5, 0	/* flush I cache */
 	mov	pc, lr
+END(sa1_cache_syncI_rng)
 
 /*
  * Context switch.
@@ -310,4 +331,5 @@ ENTRY(sa110_context_switch)
 	mov	r0, r0
 	mov	r0, r0
 	mov	pc, lr
+END(sa110_context_switch)
 #endif

Index: src/sys/arch/arm/arm/cpufunc_asm_xscale.S
diff -u src/sys/arch/arm/arm/cpufunc_asm_xscale.S:1.22 src/sys/arch/arm/arm/cpufunc_asm_xscale.S:1.23
--- src/sys/arch/arm/arm/cpufunc_asm_xscale.S:1.22	Mon Dec  2 18:36:10 2013
+++ src/sys/arch/arm/arm/cpufunc_asm_xscale.S	Sun Mar 30 01:15:03 2014
@@ -1,4 +1,4 @@
-/*	$NetBSD: cpufunc_asm_xscale.S,v 1.22 2013/12/02 18:36:10 joerg Exp $	*/
+/*	$NetBSD: cpufunc_asm_xscale.S,v 1.23 2014/03/30 01:15:03 matt Exp $	*/
 
 /*
  * Copyright (c) 2001, 2002 Wasabi Systems, Inc.
@@ -71,6 +71,7 @@
  * XScale assembly functions for CPU / MMU / TLB specific operations
  */
 
+#include "assym.h"
 #include <arm/asm.h>
 #include <arm/locore.h>
 
@@ -185,6 +186,11 @@ END(xscale_setttb)
 
 ENTRY(xscale_tlb_flushID_SE)
 	mcr	p15, 0, r0, c8, c6, 1	/* flush D tlb single entry */
 	mcr	p15, 0, r0, c8, c5, 1	/* flush I tlb single entry */
+#if PAGE_SIZE == 2 * L2_S_SIZE
+	add	r0, r0, #L2_S_SIZE
+	mcr	p15, 0, r0, c8, c6, 1	/* flush D tlb single entry */
+	mcr	p15, 0, r0, c8, c5, 1	/* flush I tlb single entry */
+#endif
 	RET
 END(xscale_tlb_flushID_SE)
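
The ARM_MMU_EXTENDED part of the change follows a second recurring shape,
visible in the arm11 and pj4b setttb and context-switch hunks above: TTBR0
is always loaded, and when r1 is zero the same table base is also loaded
into TTBR1, while the wholesale I+D TLB invalidation survives only in the
non-extended configuration. A sketch of that pattern, under the conventions
of the diffs above; example_setttb is a hypothetical name, and the barrier
instructions vary per CPU in the real routines:

ENTRY(example_setttb)
	mcr	p15, 0, r0, c2, c0, 0	/* load TTBR0 */
#ifdef ARM_MMU_EXTENDED
	cmp	r1, #0
	mcreq	p15, 0, r0, c2, c0, 1	/* r1 == 0: load TTBR1 as well */
#else
	mcr	p15, 0, r0, c8, c7, 0	/* invalidate I+D TLBs */
	mcr	p15, 0, r0, c7, c10, 4	/* drain write buffer */
#endif
	RET
END(example_setttb)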