Author: andrew
Date: Sat Sep 27 09:57:34 2014
New Revision: 272209
URL: http://svnweb.freebsd.org/changeset/base/272209

Log:
  Add machine/sysreg.h to simplify accessing the system control coprocessor
  registers and use it in the ARMv7 CPU functions.
  
  The sysreg.h file has been checked by hand; however, it may contain errors
  in the comments about when a register was first introduced. The ARMv7 CPU
  functions have been checked by compiling both the previous version and this
  one and comparing the MD5 checksums of the object files.
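  
  For example, with sysreg.h included the TTBR0 write in cpufunc_asm_armv7.S
  
      mcr     p15, 0, r0, c2, c0, 0   /* Translation Table Base Register 0 (TTBR0) */
  
  becomes
  
      mcr     CP15_TTBR0(r0)
  
  with the preprocessor expanding the macro back to the same encoding.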
  
  Submitted by: Svatopluk Kraus <onwahe at gmail.com>
  Submitted by: Michal Meloun <meloun at miracle.cz>
  Reviewed by:  ian, rpaulo
  Differential Revision: https://reviews.freebsd.org/D795

Added:
  head/sys/arm/include/sysreg.h   (contents, props changed)
Modified:
  head/sys/arm/arm/cpufunc_asm_armv7.S

Modified: head/sys/arm/arm/cpufunc_asm_armv7.S
==============================================================================
--- head/sys/arm/arm/cpufunc_asm_armv7.S        Sat Sep 27 09:39:19 2014        (r272208)
+++ head/sys/arm/arm/cpufunc_asm_armv7.S        Sat Sep 27 09:57:34 2014        (r272209)
@@ -33,6 +33,8 @@
 #include <machine/asm.h>
 __FBSDID("$FreeBSD$");
 
+#include <machine/sysreg.h>
+
        .cpu cortex-a8
 
 .Lcoherency_level:
@@ -70,12 +72,12 @@ ENTRY(armv7_setttb)
        dsb
                                
        orr     r0, r0, #PT_ATTR
-       mcr     p15, 0, r0, c2, c0, 0   /* Translation Table Base Register 0 (TTBR0) */
+       mcr     CP15_TTBR0(r0)
        isb
 #ifdef SMP
-       mcr     p15, 0, r0, c8, c3, 0   /* invalidate I+D TLBs Inner Shareable*/
+       mcr     CP15_TLBIALLIS
 #else
-       mcr     p15, 0, r0, c8, c7, 0   /* invalidate I+D TLBs */
+       mcr     CP15_TLBIALL
 #endif
        dsb
        isb
@@ -85,11 +87,11 @@ END(armv7_setttb)
 ENTRY(armv7_tlb_flushID)
        dsb
 #ifdef SMP
-       mcr     p15, 0, r0, c8, c3, 0   /* flush Unified TLB all entries Inner Shareable */
-       mcr     p15, 0, r0, c7, c1, 6   /* flush BTB Inner Shareable */
+       mcr     CP15_TLBIALLIS
+       mcr     CP15_BPIALLIS
 #else
-       mcr     p15, 0, r0, c8, c7, 0   /* flush Unified TLB all entries */
-       mcr     p15, 0, r0, c7, c5, 6   /* flush BTB */
+       mcr     CP15_TLBIALL
+       mcr     CP15_BPIALL
 #endif
        dsb
        isb
@@ -100,11 +102,11 @@ ENTRY(armv7_tlb_flushID_SE)
        ldr     r1, .Lpage_mask
        bic     r0, r0, r1
 #ifdef SMP
-       mcr     p15, 0, r0, c8, c3, 3   /* flush Unified TLB single entry Inner Shareable */
-       mcr     p15, 0, r0, c7, c1, 6   /* flush BTB Inner Shareable */
+       mcr     CP15_TLBIMVAAIS(r0)
+       mcr     CP15_BPIALLIS
 #else
-       mcr     p15, 0, r0, c8, c7, 1   /* flush Unified TLB single entry */
-       mcr     p15, 0, r0, c7, c5, 6   /* flush BTB */
+       mcr     CP15_TLBIMVA(r0)
+       mcr     CP15_BPIALL
 #endif
        dsb
        isb
@@ -149,7 +151,7 @@ Loop3:
        orr     r6, r6, r7, lsl r2
 
        /* Clean and invalidate data cache by way/index */
-       mcr     p15, 0, r6, c7, c14, 2
+       mcr     CP15_DCCISW(r6)
        subs    r9, r9, #1
        bge     Loop3
        subs    r7, r7, #1
@@ -168,9 +170,9 @@ ENTRY(armv7_idcache_wbinv_all)
        stmdb   sp!, {lr}
        bl armv7_dcache_wbinv_all
 #ifdef SMP
-       mcr     p15, 0, r0, c7, c1, 0   /* Invalidate all I caches to PoU (ICIALLUIS) */
+       mcr     CP15_ICIALLUIS
 #else
-       mcr     p15, 0, r0, c7, c5, 0   /* Invalidate all I caches to PoU (ICIALLU) */
+       mcr     CP15_ICIALLU
 #endif
        dsb
        isb
@@ -191,7 +193,7 @@ ENTRY(armv7_dcache_wb_range)
        add     r1, r1, r2
        bic     r0, r0, r3
 .Larmv7_wb_next:
-       mcr     p15, 0, r0, c7, c10, 1  /* Clean D cache SE with VA */
+       mcr     CP15_DCCMVAC(r0)
        add     r0, r0, ip
        subs    r1, r1, ip
        bhi     .Larmv7_wb_next
@@ -206,7 +208,7 @@ ENTRY(armv7_dcache_wbinv_range)
        add     r1, r1, r2
        bic     r0, r0, r3
 .Larmv7_wbinv_next:
-       mcr     p15, 0, r0, c7, c14, 1  /* Purge D cache SE with VA */
+       mcr     CP15_DCCIMVAC(r0)
        add     r0, r0, ip
        subs    r1, r1, ip
        bhi     .Larmv7_wbinv_next
@@ -225,7 +227,7 @@ ENTRY(armv7_dcache_inv_range)
        add     r1, r1, r2
        bic     r0, r0, r3
 .Larmv7_inv_next:
-       mcr     p15, 0, r0, c7, c6, 1   /* Invalidate D cache SE with VA */
+       mcr     CP15_DCIMVAC(r0)
        add     r0, r0, ip
        subs    r1, r1, ip
        bhi     .Larmv7_inv_next
@@ -240,8 +242,8 @@ ENTRY(armv7_idcache_wbinv_range)
        add     r1, r1, r2
        bic     r0, r0, r3
 .Larmv7_id_wbinv_next:
-       mcr     p15, 0, r0, c7, c5, 1   /* Invalidate I cache SE with VA */
-       mcr     p15, 0, r0, c7, c14, 1  /* Purge D cache SE with VA */
+       mcr     CP15_ICIMVAU(r0)
+       mcr     CP15_DCCIMVAC(r0)
        add     r0, r0, ip
        subs    r1, r1, ip
        bhi     .Larmv7_id_wbinv_next
@@ -252,9 +254,9 @@ END(armv7_idcache_wbinv_range)
 
 ENTRY_NP(armv7_icache_sync_all)
 #ifdef SMP
-       mcr     p15, 0, r0, c7, c1, 0   /* Invalidate all I cache to PoU Inner Shareable */
+       mcr     CP15_ICIALLUIS
 #else
-       mcr     p15, 0, r0, c7, c5, 0   /* Invalidate all I cache to PoU (ICIALLU) */
+       mcr     CP15_ICIALLU
 #endif
        isb                             /* instruction synchronization barrier */
        dsb                             /* data synchronization barrier */
@@ -264,8 +266,8 @@ END(armv7_icache_sync_all)
 ENTRY_NP(armv7_icache_sync_range)
        ldr     ip, .Larmv7_line_size
 .Larmv7_sync_next:
-       mcr     p15, 0, r0, c7, c5, 1   /* Invalidate I cache SE with VA */
-       mcr     p15, 0, r0, c7, c10, 1  /* Clean D cache SE with VA */
+       mcr     CP15_ICIMVAU(r0)
+       mcr     CP15_DCCMVAC(r0)
        add     r0, r0, ip
        subs    r1, r1, ip
        bhi     .Larmv7_sync_next
@@ -283,13 +285,13 @@ END(armv7_cpu_sleep)
 ENTRY(armv7_context_switch)
        dsb
        orr     r0, r0, #PT_ATTR
-                       
-       mcr     p15, 0, r0, c2, c0, 0   /* set the new TTB */
+
+       mcr     CP15_TTBR0(r0)
        isb
 #ifdef SMP
-       mcr     p15, 0, r0, c8, c3, 0   /* and flush the I+D tlbs Inner Sharable */
+       mcr     CP15_TLBIALLIS
 #else
-       mcr     p15, 0, r0, c8, c7, 0   /* and flush the I+D tlbs */
+       mcr     CP15_TLBIALL
 #endif
        dsb
        isb
@@ -309,12 +311,12 @@ ENTRY(armv7_sev)
 END(armv7_sev)
 
 ENTRY(armv7_auxctrl)
-       mrc p15, 0, r2, c1, c0, 1
+       mrc     CP15_ACTLR(r2)
        bic r3, r2, r0  /* Clear bits */
        eor r3, r3, r1  /* XOR bits */
 
        teq r2, r3
-       mcrne p15, 0, r3, c1, c0, 1
+       mcrne   CP15_ACTLR(r3)
        mov r0, r2
        RET
 END(armv7_auxctrl)
@@ -325,8 +327,8 @@ END(armv7_auxctrl)
  */
 ENTRY(armv7_idcache_inv_all)
        mov     r0, #0
-       mcr     p15, 2, r0, c0, c0, 0   @ set cache level to L1
-       mrc     p15, 1, r0, c0, c0, 0   @ read CCSIDR
+       mcr     CP15_CSSELR(r0)         @ set cache level to L1
+       mrc     CP15_CCSIDR(r0)
 
        ubfx    r2, r0, #13, #15        @ get num sets - 1 from CCSIDR
        ubfx    r3, r0, #3, #10         @ get numways - 1 from CCSIDR
@@ -345,7 +347,7 @@ ENTRY(armv7_idcache_inv_all)
        mov     r2, ip                  @ r2 now contains set way decr
 
        /* r3 = ways/sets, r2 = way decr, r1 = set decr, r0 and ip are free */
-1:      mcr     p15, 0, r3, c7, c6, 2   @ invalidate line
+1:     mcr     CP15_DCISW(r3)          @ invalidate line
        movs    r0, r3                  @ get current way/set
        beq     2f                      @ at 0 means we are done.
        movs    r0, r0, lsl #10         @ clear way bits leaving only set bits
@@ -355,7 +357,7 @@ ENTRY(armv7_idcache_inv_all)
 
 2:     dsb                             @ wait for stores to finish
        mov     r0, #0                  @ and ...
-       mcr     p15, 0, r0, c7, c5, 0   @ invalidate instruction+branch cache
+       mcr     CP15_ICIALLU            @ invalidate instruction+branch cache
        isb                             @ instruction sync barrier
        bx      lr                      @ return
 END(armv7_idcache_inv_all)

Added: head/sys/arm/include/sysreg.h
==============================================================================
--- /dev/null   00:00:00 1970   (empty, because file is newly added)
+++ head/sys/arm/include/sysreg.h       Sat Sep 27 09:57:34 2014        (r272209)
@@ -0,0 +1,230 @@
+/*-
+ * Copyright 2014 Svatopluk Kraus <[email protected]>
+ * Copyright 2014 Michal Meloun <[email protected]>
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ *    notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ *    notice, this list of conditions and the following disclaimer in the
+ *    documentation and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
+ * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
+ * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+ * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
+ * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
+ * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
+ * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
+ * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
+ * SUCH DAMAGE.
+ *
+ * $FreeBSD$
+ */
+
+/*
+ * Macros to make working with the System Control Registers simpler.
+ */
+
+#ifndef MACHINE_SYSREG_H
+#define        MACHINE_SYSREG_H
+
+/*
+ * CP15 C0 registers
+ */
+#define        CP15_MIDR(rr)           p15, 0, rr, c0, c0,  0 /* Main ID Register */
+#define        CP15_CTR(rr)            p15, 0, rr, c0, c0,  1 /* Cache Type Register */
+#define        CP15_TCMTR(rr)          p15, 0, rr, c0, c0,  2 /* TCM Type Register */
+#define        CP15_TLBTR(rr)          p15, 0, rr, c0, c0,  3 /* TLB Type Register */
+#define        CP15_MPIDR(rr)          p15, 0, rr, c0, c0,  5 /* Multiprocessor Affinity Register */
+#define        CP15_REVIDR(rr)         p15, 0, rr, c0, c0,  6 /* Revision ID Register */
+
+#define        CP15_ID_PFR0(rr)        p15, 0, rr, c0, c1,  0 /* Processor Feature Register 0 */
+#define        CP15_ID_PFR1(rr)        p15, 0, rr, c0, c1,  1 /* Processor Feature Register 1 */
+#define        CP15_ID_DFR0(rr)        p15, 0, rr, c0, c1,  2 /* Debug Feature Register 0 */
+#define        CP15_ID_AFR0(rr)        p15, 0, rr, c0, c1,  3 /* Auxiliary Feature Register  0 */
+#define        CP15_ID_MMFR0(rr)       p15, 0, rr, c0, c1,  4 /* Memory Model Feature Register 0 */
+#define        CP15_ID_MMFR1(rr)       p15, 0, rr, c0, c1,  5 /* Memory Model Feature Register 1 */
+#define        CP15_ID_MMFR2(rr)       p15, 0, rr, c0, c1,  6 /* Memory Model Feature Register 2 */
+#define        CP15_ID_MMFR3(rr)       p15, 0, rr, c0, c1,  7 /* Memory Model Feature Register 3 */
+
+#define        CP15_ID_ISAR0(rr)       p15, 0, rr, c0, c2,  0 /* Instruction Set Attribute Register 0 */
+#define        CP15_ID_ISAR1(rr)       p15, 0, rr, c0, c2,  1 /* Instruction Set Attribute Register 1 */
+#define        CP15_ID_ISAR2(rr)       p15, 0, rr, c0, c2,  2 /* Instruction Set Attribute Register 2 */
+#define        CP15_ID_ISAR3(rr)       p15, 0, rr, c0, c2,  3 /* Instruction Set Attribute Register 3 */
+#define        CP15_ID_ISAR4(rr)       p15, 0, rr, c0, c2,  4 /* Instruction Set Attribute Register 4 */
+#define        CP15_ID_ISAR5(rr)       p15, 0, rr, c0, c2,  5 /* Instruction Set Attribute Register 5 */
+
+#define        CP15_CCSIDR(rr)         p15, 1, rr, c0, c0,  0 /* Cache Size ID Registers */
+#define        CP15_CLIDR(rr)          p15, 1, rr, c0, c0,  1 /* Cache Level ID Register */
+#define        CP15_AIDR(rr)           p15, 1, rr, c0, c0,  7 /* Auxiliary ID Register */
+
+#define        CP15_CSSELR(rr)         p15, 2, rr, c0, c0,  0 /* Cache Size Selection Register */
+
+/*
+ * CP15 C1 registers
+ */
+#define        CP15_SCTLR(rr)          p15, 0, rr, c1, c0,  0 /* System Control Register */
+#define        CP15_ACTLR(rr)          p15, 0, rr, c1, c0,  1 /* IMPLEMENTATION DEFINED Auxiliary Control Register */
+#define        CP15_CPACR(rr)          p15, 0, rr, c1, c0,  2 /* Coprocessor Access Control Register */
+
+#define        CP15_SCR(rr)            p15, 0, rr, c1, c1,  0 /* Secure Configuration Register */
+#define        CP15_SDER(rr)           p15, 0, rr, c1, c1,  1 /* Secure Debug Enable Register */
+#define        CP15_NSACR(rr)          p15, 0, rr, c1, c1,  2 /* Non-Secure Access Control Register */
+
+/*
+ * CP15 C2 registers
+ */
+#define        CP15_TTBR0(rr)          p15, 0, rr, c2, c0,  0 /* Translation Table Base Register 0 */
+#define        CP15_TTBR1(rr)          p15, 0, rr, c2, c0,  1 /* Translation Table Base Register 1 */
+#define        CP15_TTBCR(rr)          p15, 0, rr, c2, c0,  2 /* Translation Table Base Control Register */
+
+/*
+ * CP15 C3 registers
+ */
+#define        CP15_DACR(rr)           p15, 0, rr, c3, c0,  0 /* Domain Access Control Register */
+
+/*
+ * CP15 C5 registers
+ */
+#define        CP15_DFSR(rr)           p15, 0, rr, c5, c0,  0 /* Data Fault Status Register */
+
+#if __ARM_ARCH >= 6
+/* From ARMv6: */
+#define        CP15_IFSR(rr)           p15, 0, rr, c5, c0,  1 /* Instruction Fault Status Register */
+/* From ARMv7: */
+#define        CP15_ADFSR(rr)          p15, 0, rr, c5, c1,  0 /* Auxiliary Data Fault Status Register */
+#define        CP15_AIFSR(rr)          p15, 0, rr, c5, c1,  1 /* Auxiliary Instruction Fault Status Register */
+#endif
+
+
+/*
+ * CP15 C6 registers
+ */
+#define        CP15_DFAR(rr)           p15, 0, rr, c6, c0,  0 /* Data Fault Address Register */
+
+#if __ARM_ARCH >= 6
+/* From ARMv6k: */
+#define        CP15_IFAR(rr)           p15, 0, rr, c6, c0,  2 /* Instruction Fault Address Register */
+#endif
+
+/*
+ * CP15 C7 registers
+ */
+#if __ARM_ARCH >= 6
+/* From ARMv7: */
+#define        CP15_ICIALLUIS          p15, 0, r0, c7, c1,  0 /* Instruction cache invalidate all PoU, IS */
+#define        CP15_BPIALLIS           p15, 0, r0, c7, c1,  6 /* Branch predictor invalidate all IS */
+#endif
+
+#define        CP15_PAR                p15, 0, r0, c7, c4,  0 /* Physical Address Register */
+
+#define        CP15_ICIALLU            p15, 0, r0, c7, c5,  0 /* Instruction cache invalidate all PoU */
+#define        CP15_ICIMVAU(rr)        p15, 0, rr, c7, c5,  1 /* Instruction cache invalidate */
+#if __ARM_ARCH >= 6
+/* Deprecated in ARMv7 */
+#define        CP15_CP15ISB            p15, 0, r0, c7, c5,  4 /* ISB */
+#endif
+#define        CP15_BPIALL             p15, 0, r0, c7, c5,  6 /* Branch predictor invalidate all */
+#define        CP15_BPIMVA             p15, 0, rr, c7, c5,  7 /* Branch predictor invalidate by MVA */
+
+#if __ARM_ARCH >= 6
+/* Only ARMv6: */
+#define        CP15_DCIALL             p15, 0, r0, c7, c6,  0 /* Data cache invalidate all */
+#endif
+#define        CP15_DCIMVAC(rr)        p15, 0, rr, c7, c6,  1 /* Data cache invalidate by MVA PoC */
+#define        CP15_DCISW(rr)          p15, 0, rr, c7, c6,  2 /* Data cache invalidate by set/way */
+
+#define        CP15_ATS1CPR(rr)        p15, 0, rr, c7, c8,  0 /* Stage 1 Current state PL1 read */
+#define        CP15_ATS1CPW(rr)        p15, 0, rr, c7, c8,  1 /* Stage 1 Current state PL1 write */
+#define        CP15_ATS1CUR(rr)        p15, 0, rr, c7, c8,  2 /* Stage 1 Current state unprivileged read */
+#define        CP15_ATS1CUW(rr)        p15, 0, rr, c7, c8,  3 /* Stage 1 Current state unprivileged write */
+
+#if __ARM_ARCH >= 6
+/* From ARMv7: */
+#define        CP15_ATS12NSOPR(rr)     p15, 0, rr, c7, c8,  4 /* Stages 1 and 2 Non-secure only PL1 read */
+#define        CP15_ATS12NSOPW(rr)     p15, 0, rr, c7, c8,  5 /* Stages 1 and 2 Non-secure only PL1 write */
+#define        CP15_ATS12NSOUR(rr)     p15, 0, rr, c7, c8,  6 /* Stages 1 and 2 Non-secure only unprivileged read */
+#define        CP15_ATS12NSOUW(rr)     p15, 0, rr, c7, c8,  7 /* Stages 1 and 2 Non-secure only unprivileged write */
+#endif
+
+#if __ARM_ARCH >= 6
+/* Only ARMv6: */
+#define        CP15_DCCALL             p15, 0, r0, c7, c10, 0 /* Data cache clean all */
+#endif
+#define        CP15_DCCMVAC(rr)        p15, 0, rr, c7, c10, 1 /* Data cache clean by MVA PoC */
+#define        CP15_DCCSW(rr)          p15, 0, rr, c7, c10, 2 /* Data cache clean by set/way */
+#if __ARM_ARCH >= 6
+/* Only ARMv6: */
+#define        CP15_CP15DSB            p15, 0, r0, c7, c10, 4 /* DSB */
+#define        CP15_CP15DMB            p15, 0, r0, c7, c10, 5 /* DMB */
+#endif
+
+#if __ARM_ARCH >= 6
+/* From ARMv7: */
+#define        CP15_DCCMVAU(rr)        p15, 0, rr, c7, c11, 1 /* Data cache clean by MVA PoU */
+#endif
+
+#if __ARM_ARCH >= 6
+/* Only ARMv6: */
+#define        CP15_DCCIALL            p15, 0, r0, c7, c14, 0 /* Data cache clean and invalidate all */
+#endif
+#define        CP15_DCCIMVAC(rr)       p15, 0, rr, c7, c14, 1 /* Data cache clean and invalidate by MVA PoC */
+#define        CP15_DCCISW(rr)         p15, 0, rr, c7, c14, 2 /* Data cache clean and invalidate by set/way */
+
+/*
+ * CP15 C8 registers
+ */
+#if __ARM_ARCH >= 6
+/* From ARMv7: */
+#define        CP15_TLBIALLIS          p15, 0, r0, c8, c3, 0 /* Invalidate entire unified TLB IS */
+#define        CP15_TLBIMVAIS(rr)      p15, 0, rr, c8, c3, 1 /* Invalidate unified TLB by MVA IS */
+#define        CP15_TLBIASIDIS(rr)     p15, 0, rr, c8, c3, 2 /* Invalidate unified TLB by ASID IS */
+#define        CP15_TLBIMVAAIS(rr)     p15, 0, rr, c8, c3, 3 /* Invalidate unified TLB by MVA, all ASID IS */
+#endif
+
+#define        CP15_TLBIALL            p15, 0, r0, c8, c7, 0 /* Invalidate entire unified TLB */
+#define        CP15_TLBIMVA(rr)        p15, 0, rr, c8, c7, 1 /* Invalidate unified TLB by MVA */
+#define        CP15_TLBIASID(rr)       p15, 0, rr, c8, c7, 2 /* Invalidate unified TLB by ASID */
+
+#if __ARM_ARCH >= 6
+/* From ARMv6: */
+#define        CP15_TLBIMVAA(rr)       p15, 0, rr, c8, c7, 3 /* Invalidate unified TLB by MVA, all ASID */
+#endif
+
+/*
+ * CP15 C10 registers
+ */
+/* Without LPAE this is PRRR, with LPAE it's MAIR0 */
+#define        CP15_PRRR(rr)           p15, 0, rr, c10, c2, 0 /* Primary Region Remap Register */
+#define        CP15_MAIR0(rr)          p15, 0, rr, c10, c2, 0 /* Memory Attribute Indirection Register 0 */
+/* Without LPAE this is NMRR, with LPAE it's MAIR1 */
+#define        CP15_NMRR(rr)           p15, 0, rr, c10, c2, 1 /* Normal Memory Remap Register */
+#define        CP15_MAIR1(rr)          p15, 0, rr, c10, c2, 1 /* Memory Attribute Indirection Register 1 */
+
+#define        CP15_AMAIR0(rr)         p15, 0, rr, c10, c3, 0 /* Auxiliary Memory Attribute Indirection Register 0 */
+#define        CP15_AMAIR1(rr)         p15, 0, rr, c10, c3, 1 /* Auxiliary Memory Attribute Indirection Register 1 */
+
+/*
+ * CP15 C12 registers
+ */
+#define        CP15_VBAR(rr)           p15, 0, rr, c12, c0, 0 /* Vector Base Address Register */
+#define        CP15_MVBAR(rr)          p15, 0, rr, c12, c0, 1 /* Monitor Vector Base Address Register */
+
+#define        CP15_ISR(rr)            p15, 0, rr, c12, c1, 0 /* Interrupt Status Register */
+
+/*
+ * CP15 C13 registers
+ */
+#define        CP15_FCSEIDR(rr)        p15, 0, rr, c13, c0, 0 /* FCSE Process ID Register */
+#define        CP15_CONTEXTIDR(rr)     p15, 0, rr, c13, c0, 1 /* Context ID Register */
+#define        CP15_TPIDRURW(rr)       p15, 0, rr, c13, c0, 2 /* User Read/Write Thread ID Register */
+#define        CP15_TPIDRURO(rr)       p15, 0, rr, c13, c0, 3 /* User Read-Only Thread ID Register */
+#define        CP15_TPIDRPRW(rr)       p15, 0, rr, c13, c0, 4 /* PL1 only Thread ID Register */
+
+#endif /* !MACHINE_SYSREG_H */
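
For reference, a minimal sketch (not part of this commit) of how these macros
could also be consumed from C via inline assembly; the stringification helpers
and the function name below are hypothetical:

        #include <sys/types.h>
        #include <machine/sysreg.h>

        /* Two-step variadic stringifier so the CP15_* macro is expanded first. */
        #define _SREG_STR1(s...)        #s
        #define _SREG_STR(s...)         _SREG_STR1(s)

        static __inline uint32_t
        cp15_midr_read(void)
        {
                uint32_t v;

                /* Expands to: mrc p15, 0, %0, c0, c0, 0 */
                __asm __volatile("mrc " _SREG_STR(CP15_MIDR(%0)) : "=r" (v));
                return (v);
        }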