Issue |
56567
|
Summary |
armv7-a thumb clang reorders ldr before the first inline asm in an __attribute__((naked)) function
|
Labels |
new issue
|
Assignees |
|
Reporter |
k15tfu
|
Hi!
Compile the following code with clang targeting armv7-a, using `-O2 -fPIC -march=armv7-a -mthumb`:
```
void * this_ptr;
void (*call_enter_ptr)(void * this_);
__attribute__((naked))
void raw_call_enter() {
#if defined(__aarch64__)
asm volatile(
"stp x0, x1, [sp, #-16]!\n"
// ...
: : :
);
asm volatile("" : : : "memory"); // compiler barrier
asm volatile(
"ldr x0, %1\n"
"blr %0\n"
: : "r"(call_enter_ptr), "m"(this_ptr) : "x0"
);
asm volatile(
// ...
"ldp x0, x1, [sp], #16\n"
: : :
);
asm volatile(
"ret"
);
#elif defined(__arm__)
asm volatile(
"push {r0-r3, r12, lr}\n"
: : :
);
asm volatile("" : : : "memory"); // compiler barrier
asm volatile(
"ldr r0, %1\n"
"blx %0\n"
: : "r"(call_enter_ptr), "m"(this_ptr) : "r0"
);
asm volatile(
"pop {r0-r3, r12, lr}\n"
: : :
);
asm volatile(
"bx lr"
);
#endif
}
```
Output:
```
raw_call_enter():
ldr r0, .LCPI0_0
ldr r1, .LCPI0_1
push.w {r0, r1, r2, r3, r12, lr} <-- and now r0 & r1 are invalid
.LPC0_0:
add r0, pc
.LPC0_1:
add r1, pc
ldr r0, [r0]
ldr r1, [r1]
ldr r2, [r0]
ldr r0, [r1]
blx r2
pop.w {r0, r1, r2, r3, r12, lr}
bx lr
.LCPI0_0:
.Ltmp1:
.long call_enter_ptr(GOT_PREL)-((.LPC0_0+4)-.Ltmp1)
.LCPI0_1:
.Ltmp2:
.long this_ptr(GOT_PREL)-((.LPC0_1+4)-.Ltmp2)
this_ptr:
.long 0
call_enter_ptr:
.long 0
```
armv7-a clang 11.0.1
https://godbolt.org/z/d9dMndvsj
_______________________________________________
llvm-bugs mailing list
llvm-bugs@lists.llvm.org
https://lists.llvm.org/cgi-bin/mailman/listinfo/llvm-bugs