Use the SRS (Store Return State) instruction if it is available.  This
considerably simplifies saving and restoring the interrupt context.
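
For reference, the SRS/RFE based entry and exit in the handler below
reduce to roughly the following pattern (a simplified sketch only;
per-CPU bookkeeping, nesting, stack switching, VFP and profiling
handling from the actual change are omitted):

	/* IRQ entry: adjust the return address, then store LR_irq and
	 * SPSR_irq directly onto the SVC stack and continue in SVC mode. */
	sub	lr, lr, #4
	srsfd	sp!, #ARM_PSR_M_SVC
	cps	#ARM_PSR_M_SVC
	push	{r0-r3, r7, r9, r12, lr}

	/* ... interrupt processing ... */

	/* Exit: restore the registers, then pop PC and CPSR in one step. */
	pop	{r0-r3, r7, r9, r12, lr}
	rfefd	sp!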
---
 cpukit/score/cpu/arm/arm_exc_interrupt.S      | 45 +++++++++++++++++--
 .../score/cpu/arm/include/rtems/score/arm.h   |  1 +
 .../cpu/arm/include/rtems/score/cpuimpl.h     | 13 ++++++
 3 files changed, 56 insertions(+), 3 deletions(-)

diff --git a/cpukit/score/cpu/arm/arm_exc_interrupt.S b/cpukit/score/cpu/arm/arm_exc_interrupt.S
index 43568747b1..1dc8c6eab4 100644
--- a/cpukit/score/cpu/arm/arm_exc_interrupt.S
+++ b/cpukit/score/cpu/arm/arm_exc_interrupt.S
@@ -34,6 +34,11 @@
 
 #ifdef ARM_MULTILIB_ARCH_V4
 
+#define SELF_CPU_CONTROL r7
+#define NON_VOLATILE_SCRATCH r9
+
+#ifndef ARM_MULTILIB_HAS_STORE_RETURN_STATE
+
 #define EXCHANGE_LR r4
 #define EXCHANGE_SPSR r5
 #define EXCHANGE_CPSR r6
@@ -42,16 +47,31 @@
 #define EXCHANGE_LIST {EXCHANGE_LR, EXCHANGE_SPSR, EXCHANGE_CPSR, EXCHANGE_INT_SP}
 #define EXCHANGE_SIZE 16
 
-#define SELF_CPU_CONTROL r7
-#define NON_VOLATILE_SCRATCH r9
-
 #define CONTEXT_LIST {r0, r1, r2, r3, EXCHANGE_LR, EXCHANGE_SPSR, SELF_CPU_CONTROL, r12}
 #define CONTEXT_SIZE 32
 
+#endif /* ARM_MULTILIB_HAS_STORE_RETURN_STATE */
+
 .arm
 .globl _ARMV4_Exception_interrupt
 _ARMV4_Exception_interrupt:
 
+#ifdef ARM_MULTILIB_HAS_STORE_RETURN_STATE
+       /* Prepare return from interrupt */
+       sub     lr, lr, #4
+
+       /* Save LR_irq and SPSR_irq to the SVC stack */
+       srsfd   sp!, #ARM_PSR_M_SVC
+
+       /* Switch to SVC mode */
+       cps     #ARM_PSR_M_SVC
+
+       /*
+        * Save the volatile registers, two non-volatile registers used for
+        * interrupt processing, and the link register.
+        */
+       push    {r0-r3, SELF_CPU_CONTROL, NON_VOLATILE_SCRATCH, r12, lr}
+#else /* ARM_MULTILIB_HAS_STORE_RETURN_STATE */
        /* Save exchange registers to exchange area */
        stmdb   sp, EXCHANGE_LIST
 
@@ -73,6 +93,7 @@ _ARMV4_Exception_interrupt:
         */
        stmdb   sp!, CONTEXT_LIST
        stmdb   sp!, {NON_VOLATILE_SCRATCH, lr}
+#endif /* ARM_MULTILIB_HAS_STORE_RETURN_STATE */
 
 #ifdef ARM_MULTILIB_VFP
        /* Save VFP context */
@@ -87,11 +108,13 @@ _ARMV4_Exception_interrupt:
        /* Get per-CPU control of current processor */
        GET_SELF_CPU_CONTROL    SELF_CPU_CONTROL
 
+#ifndef ARM_MULTILIB_HAS_STORE_RETURN_STATE
        /* Remember INT stack pointer */
        mov     r1, EXCHANGE_INT_SP
 
        /* Restore exchange registers from exchange area */
        ldmia   r1, EXCHANGE_LIST
+#endif /* ARM_MULTILIB_HAS_STORE_RETURN_STATE */
 
        /* Get interrupt nest level */
        ldr     r2, [SELF_CPU_CONTROL, #PER_CPU_ISR_NEST_LEVEL]
@@ -99,7 +122,11 @@ _ARMV4_Exception_interrupt:
        /* Switch stack if necessary and save original stack pointer */
        mov     NON_VOLATILE_SCRATCH, sp
        cmp     r2, #0
+#ifdef ARM_MULTILIB_HAS_STORE_RETURN_STATE
+       ldreq   sp, [SELF_CPU_CONTROL, #PER_CPU_INTERRUPT_STACK_HIGH]
+#else
        moveq   sp, r1
+#endif
 
        /* Increment interrupt nest and thread dispatch disable level */
        ldr     r3, [SELF_CPU_CONTROL, #PER_CPU_THREAD_DISPATCH_DISABLE_LEVEL]
@@ -208,6 +235,13 @@ _ARMV4_Exception_interrupt:
        vmsr    FPSCR, r0
 #endif /* ARM_MULTILIB_VFP */
 
+#ifdef ARM_MULTILIB_HAS_STORE_RETURN_STATE
+       /*
+        * Restore the volatile registers, two non-volatile registers used for
+        * interrupt processing, and the link register.
+        */
+       pop     {r0-r3, SELF_CPU_CONTROL, NON_VOLATILE_SCRATCH, r12, lr}
+#else /* ARM_MULTILIB_HAS_STORE_RETURN_STATE */
        /* Restore NON_VOLATILE_SCRATCH register and link register */
        ldmia   sp!, {NON_VOLATILE_SCRATCH, lr}
 
@@ -238,6 +272,7 @@ _ARMV4_Exception_interrupt:
 
        /* Restore EXCHANGE_LR and EXCHANGE_SPSR registers from exchange area */
        ldmia   sp!, {EXCHANGE_LR, EXCHANGE_SPSR}
+#endif /* ARM_MULTILIB_HAS_STORE_RETURN_STATE */
 
 #ifdef ARM_MULTILIB_HAS_LOAD_STORE_EXCLUSIVE
        /*
@@ -267,7 +302,11 @@ _ARMV4_Exception_interrupt:
 #endif
 
        /* Return from interrupt */
+#ifdef ARM_MULTILIB_HAS_STORE_RETURN_STATE
+       rfefd   sp!
+#else
        subs    pc, lr, #4
+#endif
 
 #ifdef RTEMS_PROFILING
 #ifdef __thumb2__
diff --git a/cpukit/score/cpu/arm/include/rtems/score/arm.h b/cpukit/score/cpu/arm/include/rtems/score/arm.h
index b1e4b07a37..7eaa69d889 100644
--- a/cpukit/score/cpu/arm/include/rtems/score/arm.h
+++ b/cpukit/score/cpu/arm/include/rtems/score/arm.h
@@ -47,6 +47,7 @@ extern "C" {
   #define ARM_MULTILIB_HAS_WFI
   #define ARM_MULTILIB_HAS_LOAD_STORE_EXCLUSIVE
   #define ARM_MULTILIB_HAS_BARRIER_INSTRUCTIONS
+  #define ARM_MULTILIB_HAS_STORE_RETURN_STATE
 #endif
 
 #ifndef ARM_DISABLE_THREAD_ID_REGISTER_USE
diff --git a/cpukit/score/cpu/arm/include/rtems/score/cpuimpl.h b/cpukit/score/cpu/arm/include/rtems/score/cpuimpl.h
index 0f86710966..a6fe74e9ad 100644
--- a/cpukit/score/cpu/arm/include/rtems/score/cpuimpl.h
+++ b/cpukit/score/cpu/arm/include/rtems/score/cpuimpl.h
@@ -79,6 +79,18 @@ typedef struct {
   double d6;
   double d7;
 #endif /* ARM_MULTILIB_VFP */
+#ifdef ARM_MULTILIB_HAS_STORE_RETURN_STATE
+  uint32_t r0;
+  uint32_t r1;
+  uint32_t r2;
+  uint32_t r3;
+  uint32_t r7;
+  uint32_t r9;
+  uint32_t r12;
+  uint32_t lr;
+  uint32_t return_pc;
+  uint32_t return_cpsr;
+#else /* ARM_MULTILIB_HAS_STORE_RETURN_STATE */
   uint32_t r9;
   uint32_t lr;
   uint32_t r0;
@@ -89,6 +101,7 @@ typedef struct {
   uint32_t return_cpsr;
   uint32_t r7;
   uint32_t r12;
+#endif /* ARM_MULTILIB_HAS_STORE_RETURN_STATE */
 } CPU_Interrupt_frame;
 
 #ifdef RTEMS_SMP
-- 
2.31.1
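
A side note on the cpuimpl.h hunk: the new field order simply mirrors the
SVC stack layout produced by the SRS-path prologue, since push stores the
lowest-numbered register at the lowest address and srsfd stores LR_irq just
below SPSR_irq.  The following C sketch is purely illustrative (the VFP part
of the real CPU_Interrupt_frame is omitted and the type name is made up):

#include <stdint.h>

/* Layout of the SVC stack after "srsfd; cps; push", lowest address
 * (current sp) first. */
typedef struct {
  uint32_t r0;          /* pushed by: push {r0-r3, r7, r9, r12, lr} */
  uint32_t r1;
  uint32_t r2;
  uint32_t r3;
  uint32_t r7;          /* SELF_CPU_CONTROL */
  uint32_t r9;          /* NON_VOLATILE_SCRATCH */
  uint32_t r12;
  uint32_t lr;          /* LR of the interrupted (SVC) context */
  uint32_t return_pc;   /* LR_irq, stored by srsfd (return address) */
  uint32_t return_cpsr; /* SPSR_irq, stored by srsfd */
} example_srs_interrupt_frame;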
