Fix two bugs in .Lcbc_slow_enc_in_place:

- At the end of .Lcbc_slow_enc_in_place, the length 16 should be stored in %r10, not in $_len.
- In .Lcbc_slow_enc_in_place, %rdi should be initialized before the rep stosb, as sketched below.
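For reference, rep stosb stores %al into %rcx consecutive bytes starting at the address in %rdi, so %rdi must point just past the processed bytes of the output block before the tail is zeroed. A rough C sketch of the intended tail zeroing follows; the names "out" and "len" are illustrative stand-ins for $out and the residual count in %r10, not code from the patch:

    #include <stddef.h>
    #include <string.h>

    /*
     * Illustrative sketch only: zero the unused tail of a 16-byte
     * block, mirroring what the assembly sets up for rep stosb
     * (%rdi = out + len, %rcx = 16 - len, %al = 0).
     */
    static void zero_tail(unsigned char *out, size_t len)
    {
        memset(out + len, 0, 16 - len);
    }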
Signed-off-by: Huang Ying <ying.hu...@intel.com>
---
 crypto/aes/asm/aes-x86_64.pl | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

--- a/crypto/aes/asm/aes-x86_64.pl
+++ b/crypto/aes/asm/aes-x86_64.pl
@@ -1994,10 +1994,12 @@ AES_cbc_encrypt:
 .Lcbc_slow_enc_in_place:
	mov	\$16,%rcx	# zero tail
	sub	%r10,%rcx
+	mov	$out,%rdi
+	add	%r10,%rdi
	xor	%rax,%rax
	.long	0x9066AAF3	# rep stosb
	mov	$out,$inp	# this is not a mistake!
-	movq	\$16,$_len	# len=16
+	movq	\$16,%r10	# len=16
	jmp	.Lcbc_slow_enc_loop	# one more spin...
 #--------------------------- SLOW DECRYPT ---------------------------#
 .align	16