Re: [PATCH 1/2] armv8/cache.S: Read sysreg fields through ubfx

2021-09-23 Thread Tom Rini
On Fri, Aug 27, 2021 at 06:03:45PM +0200, Pierre-Clément Tosi wrote:

> Improve the file's readability and conciseness by using the appropriate
> AArch64 instruction: ubfx (unsigned bitfield extract). This makes the
> code easier to follow, as it directly manipulates the offsets and widths
> of the fields read from system registers, exactly as they are expressed
> in the Arm Architecture Reference Manual (ARM ARM). This has the added
> benefit (albeit arguably negligible) of reducing the final code size.
> 
> Signed-off-by: Pierre-Clément Tosi 

Applied to u-boot/next, thanks!

-- 
Tom



[PATCH 1/2] armv8/cache.S: Read sysreg fields through ubfx

2021-08-27 Thread Pierre-Clément Tosi
Improve the file's readability and conciseness by using the appropriate
AArch64 instruction: ubfx (unsigned bitfield extract). This makes the
code easier to follow, as it directly manipulates the offsets and widths
of the fields read from system registers, exactly as they are expressed
in the Arm Architecture Reference Manual (ARM ARM). This has the added
benefit (albeit arguably negligible) of reducing the final code size.
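
As a quick illustration of the equivalence relied on here (not part of
the patch itself), take the CCSIDR_EL1 "number of sets" field, bits
[27:13], with the same registers the existing code uses:

    mrs  x6, ccsidr_el1          /* cache size ID register */

    /* before: materialise a mask, then shift-and-mask */
    mov  x4, #0x7fff
    and  x4, x4, x6, lsr #13     /* x4 <- number of cache sets - 1 */

    /* after: a single unsigned bitfield extract of bits [27:13] */
    ubfx x4, x6, #13, #15        /* x4 <- number of cache sets - 1 */

Both sequences leave the 15-bit field zero-extended in x4; ubfx simply
takes the field's offset (#13) and width (#15) straight from the
register layout.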

Signed-off-by: Pierre-Clément Tosi 
---
 arch/arm/cpu/armv8/cache.S | 16 ++--
 1 file changed, 6 insertions(+), 10 deletions(-)

diff --git a/arch/arm/cpu/armv8/cache.S b/arch/arm/cpu/armv8/cache.S
index 443d94c262..aabb3dff61 100644
--- a/arch/arm/cpu/armv8/cache.S
+++ b/arch/arm/cpu/armv8/cache.S
@@ -27,13 +27,11 @@ ENTRY(__asm_dcache_level)
msr csselr_el1, x12 /* select cache level */
isb /* sync change of cssidr_el1 */
mrs x6, ccsidr_el1  /* read the new cssidr_el1 */
-   and x2, x6, #7  /* x2 <- log2(cache line size)-4 */
+   ubfx x2, x6,  #0,  #3   /* x2 <- log2(cache line size)-4 */
+   ubfx x3, x6,  #3, #10   /* x3 <- number of cache ways - 1 */
+   ubfx x4, x6, #13, #15   /* x4 <- number of cache sets - 1 */
add x2, x2, #4  /* x2 <- log2(cache line size) */
-   mov x3, #0x3ff
-   and x3, x3, x6, lsr #3  /* x3 <- max number of #ways */
clz w5, w3  /* bit position of #ways */
-   mov x4, #0x7fff
-   and x4, x4, x6, lsr #13 /* x4 <- max number of #sets */
/* x12 <- cache level << 1 */
/* x2 <- line length offset */
/* x3 <- number of cache ways - 1 */
@@ -72,8 +70,7 @@ ENTRY(__asm_dcache_all)
mov x1, x0
dsb sy
mrs x10, clidr_el1  /* read clidr_el1 */
-   lsr x11, x10, #24
-   and x11, x11, #0x7  /* x11 <- loc */
+   ubfx x11, x10, #24, #3  /* x11 <- loc */
cbz x11, finished   /* if loc is 0, exit */
mov x15, lr
mov x0, #0  /* start flush at cache level 0 */
@@ -131,8 +128,7 @@ ENDPROC(__asm_invalidate_dcache_all)
 .pushsection .text.__asm_flush_dcache_range, "ax"
 ENTRY(__asm_flush_dcache_range)
mrs x3, ctr_el0
-   lsr x3, x3, #16
-   and x3, x3, #0xf
+   ubfx x3, x3, #16, #4
mov x2, #4
lsl x2, x2, x3  /* cache line size */
 
@@ -158,7 +154,7 @@ ENDPROC(__asm_flush_dcache_range)
 .pushsection .text.__asm_invalidate_dcache_range, "ax"
 ENTRY(__asm_invalidate_dcache_range)
mrs x3, ctr_el0
-   ubfm x3, x3, #16, #19
+   ubfx x3, x3, #16, #4
mov x2, #4
lsl x2, x2, x3  /* cache line size */
 
-- 
2.33.0.259.gc128427fd7-goog
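
For reference (not part of the commit message): ubfx is an assembler
alias of ubfm whose second immediate gives the field width rather than
the index of the field's most significant bit, so the two spellings
below read the same CTR_EL0 field (DminLine, bits [19:16]) and assemble
to the same instruction:

    /* alias form removed by the last hunk: #immr, #imms (msb index) */
    mrs  x3, ctr_el0
    ubfm x3, x3, #16, #19

    /* form used throughout after the patch: #lsb, #width */
    mrs  x3, ctr_el0
    ubfx x3, x3, #16, #4

That conversion is therefore purely cosmetic; it just keeps the whole
file on the more readable ubfx spelling.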


-- 
Pierre