arm_early_mmu_cache_invalidate now clobbers x0, x1 and x2, which may
still hold the boot arguments passed in by a previous-stage bootloader.
Have the caller save and restore them around the call.
---
Rouven, does this work for you?
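
As an alternative, a stack-based save/restore would avoid relying on the
callee leaving x4..x6 untouched (only a sketch, assuming the stack
pointer just set up from x3 points to usable memory at this point):

        mov     sp, x3
        /* x0..x2 carry the boot arguments; sp stays 16-byte aligned */
        stp     x0, x1, [sp, #-32]!
        str     x2, [sp, #16]
        bl      arm_early_mmu_cache_invalidate
        ldr     x2, [sp, #16]
        ldp     x0, x1, [sp], #32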
---
 arch/arm/cpu/entry_ll_64.S | 14 +++++++++-----
 1 file changed, 9 insertions(+), 5 deletions(-)

diff --git a/arch/arm/cpu/entry_ll_64.S b/arch/arm/cpu/entry_ll_64.S
index 37e0cb66b549..41d6cfb6a851 100644
--- a/arch/arm/cpu/entry_ll_64.S
+++ b/arch/arm/cpu/entry_ll_64.S
@@ -10,14 +10,18 @@
 .section .text.__barebox_arm_entry
 ENTRY(__barebox_arm_entry)
        mov     sp, x3
-       /*
-        * arm_early_mmu_cache_invalidate is jsut a call to
-        * v8_invalidate_icache_all() which doesn't clobber x0, x1 or x2
-        */
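+       /* preserve the boot arguments passed in x0, x1 and x2 */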
+       mov     x4, x0
+       mov     x5, x1
+       mov     x6, x2
        bl      arm_early_mmu_cache_invalidate
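+       /* restore the boot arguments */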
+       mov     x0, x4
+       mov     x1, x5
+       mov     x2, x6
 #if IS_ENABLED(CONFIG_PBL_IMAGE)
        b       barebox_pbl_start
 #else
        b       barebox_non_pbl_start
 #endif
-ENDPROC(__barebox_arm_entry)
\ No newline at end of file
+ENDPROC(__barebox_arm_entry)
-- 
2.23.0

