[tip:x86/debug] x86/asm/crypto: Create stack frames in crypto functions

2016-02-24 Thread tip-bot for Josh Poimboeuf
Commit-ID:  8691ccd764f9ecc69a6812dfe76214c86ac9ba06
Gitweb: http://git.kernel.org/tip/8691ccd764f9ecc69a6812dfe76214c86ac9ba06
Author: Josh Poimboeuf 
AuthorDate: Thu, 21 Jan 2016 16:49:19 -0600
Committer:  Ingo Molnar 
CommitDate: Wed, 24 Feb 2016 08:35:43 +0100

x86/asm/crypto: Create stack frames in crypto functions

The crypto code has several callable non-leaf functions which don't
honor CONFIG_FRAME_POINTER, which can result in bad stack traces.

Create stack frames for them when CONFIG_FRAME_POINTER is enabled.
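
Concretely, FRAME_BEGIN, FRAME_END and FRAME_OFFSET come from the asm/frame.h
header that the patch includes below.  As a rough sketch (an illustration of
the idea, not the verbatim kernel header), the macros can be thought of as:

	#ifdef CONFIG_FRAME_POINTER
	# ifdef __x86_64__
	#  define FRAME_BEGIN	push %rbp; mov %rsp, %rbp
	#  define FRAME_END	pop %rbp
	#  define FRAME_OFFSET	8	/* size of the saved %rbp */
	# else
	#  define FRAME_BEGIN	push %ebp; mov %esp, %ebp
	#  define FRAME_END	pop %ebp
	#  define FRAME_OFFSET	4	/* size of the saved %ebp */
	# endif
	#else	/* !CONFIG_FRAME_POINTER: the macros compile away */
	# define FRAME_BEGIN
	# define FRAME_END
	# define FRAME_OFFSET	0
	#endif

With frame pointers enabled, each annotated function opens a standard stack
frame on entry and tears it down before 'ret', so a frame-pointer-based
unwinder can see these functions in stack traces; FRAME_OFFSET compensates
for the extra saved frame pointer when the 32-bit code picks its arguments
up from the stack.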

Signed-off-by: Josh Poimboeuf 
Cc: Andrew Morton 
Cc: Andy Lutomirski 
Cc: Andy Lutomirski 
Cc: Arnaldo Carvalho de Melo 
Cc: Bernd Petrovitsch 
Cc: Borislav Petkov 
Cc: Brian Gerst 
Cc: Chris J Arges 
Cc: David S. Miller 
Cc: Denys Vlasenko 
Cc: H. Peter Anvin 
Cc: Herbert Xu 
Cc: Jiri Slaby 
Cc: Linus Torvalds 
Cc: Michal Marek 
Cc: Namhyung Kim 
Cc: Pedro Alves 
Cc: Peter Zijlstra 
Cc: Thomas Gleixner 
Cc: live-patch...@vger.kernel.org
Link: http://lkml.kernel.org/r/6c20192bcf1102ae18ae5a242cabf30ce9b29895.1453405861.git.jpoim...@redhat.com
Signed-off-by: Ingo Molnar 
---
 arch/x86/crypto/aesni-intel_asm.S                | 73 +++-
 arch/x86/crypto/camellia-aesni-avx-asm_64.S      | 15 +
 arch/x86/crypto/camellia-aesni-avx2-asm_64.S     | 15 +
 arch/x86/crypto/cast5-avx-x86_64-asm_64.S        |  9 +++
 arch/x86/crypto/cast6-avx-x86_64-asm_64.S        | 13 +
 arch/x86/crypto/ghash-clmulni-intel_asm.S        |  5 ++
 arch/x86/crypto/serpent-avx-x86_64-asm_64.S      | 13 +
 arch/x86/crypto/serpent-avx2-asm_64.S            | 13 +
 arch/x86/crypto/sha-mb/sha1_mb_mgr_flush_avx2.S  |  3 +
 arch/x86/crypto/sha-mb/sha1_mb_mgr_submit_avx2.S |  3 +
 arch/x86/crypto/twofish-avx-x86_64-asm_64.S      | 13 +
 11 files changed, 148 insertions(+), 27 deletions(-)

diff --git a/arch/x86/crypto/aesni-intel_asm.S b/arch/x86/crypto/aesni-intel_asm.S
index c44cfed..383a6f8 100644
--- a/arch/x86/crypto/aesni-intel_asm.S
+++ b/arch/x86/crypto/aesni-intel_asm.S
@@ -31,6 +31,7 @@
 
 #include <linux/linkage.h>
 #include <asm/inst.h>
+#include <asm/frame.h>
 
 /*
  * The following macros are used to move an (un)aligned 16 byte value to/from
@@ -1800,11 +1801,12 @@ ENDPROC(_key_expansion_256b)
  *   unsigned int key_len)
  */
 ENTRY(aesni_set_key)
+   FRAME_BEGIN
 #ifndef __x86_64__
pushl KEYP
-   movl 8(%esp), KEYP  # ctx
-   movl 12(%esp), UKEYP# in_key
-   movl 16(%esp), %edx # key_len
+   movl (FRAME_OFFSET+8)(%esp), KEYP   # ctx
+   movl (FRAME_OFFSET+12)(%esp), UKEYP # in_key
+   movl (FRAME_OFFSET+16)(%esp), %edx  # key_len
 #endif
movups (UKEYP), %xmm0   # user key (first 16 bytes)
movaps %xmm0, (KEYP)
@@ -1905,6 +1907,7 @@ ENTRY(aesni_set_key)
 #ifndef __x86_64__
popl KEYP
 #endif
+   FRAME_END
ret
 ENDPROC(aesni_set_key)
 
@@ -1912,12 +1915,13 @@ ENDPROC(aesni_set_key)
  * void aesni_enc(struct crypto_aes_ctx *ctx, u8 *dst, const u8 *src)
  */
 ENTRY(aesni_enc)
+   FRAME_BEGIN
 #ifndef __x86_64__
pushl KEYP
pushl KLEN
-   movl 12(%esp), KEYP
-   movl 16(%esp), OUTP
-   movl 20(%esp), INP
+   movl (FRAME_OFFSET+12)(%esp), KEYP  # ctx
+   movl (FRAME_OFFSET+16)(%esp), OUTP  # dst
+   movl (FRAME_OFFSET+20)(%esp), INP   # src
 #endif
movl 480(KEYP), KLEN# key length
movups (INP), STATE # input
@@ -1927,6 +1931,7 @@ ENTRY(aesni_enc)
popl KLEN
popl KEYP
 #endif
+   FRAME_END
ret
 ENDPROC(aesni_enc)
 
@@ -2101,12 +2106,13 @@ ENDPROC(_aesni_enc4)
  * void aesni_dec (struct crypto_aes_ctx *ctx, u8 *dst, const u8 *src)
  */
 ENTRY(aesni_dec)
+   FRAME_BEGIN
 #ifndef __x86_64__
pushl KEYP
pushl KLEN
-   movl 12(%esp), KEYP
-   movl 16(%esp), OUTP
-   movl 20(%esp), INP
+   movl (FRAME_OFFSET+12)(%esp), KEYP  # ctx
+   movl (FRAME_OFFSET+16)(%esp), OUTP  # dst
+   movl (FRAME_OFFSET+20)(%esp), INP   # src
 #endif
mov 480(KEYP), KLEN # key length
add $240, KEYP
@@ -2117,6 +2123,7 @@ ENTRY(aesni_dec)
popl KLEN
popl KEYP
 #endif
+   FRAME_END
ret
 ENDPROC(aesni_dec)
 
@@ -2292,14 +2299,15 @@ ENDPROC(_aesni_dec4)
  *   size_t len)
  */
 ENTRY(aesni_ecb_enc)
+   FRAME_BEGIN
 #ifndef __x86_64__
pushl LEN
pushl KEYP
 pushl KLEN
-   movl 16(%esp), KEYP
-   movl 20(%esp), OUTP
-   movl 24(%esp), INP
-   movl 28(%esp), LEN
+   movl (FRAME_OFFSET+16)(%esp), KEYP  # ctx
+   movl (FRAME_OFFSET+20)(%esp), OUTP  # dst
+   movl (FRAME_OFFSET+24)(%esp), INP   # src
+   movl (FRAME_OFFSET+28)(%esp), LEN   # len
 #endif
    test LEN, LEN   # check length
    jz .Lecb_enc_ret
@@ -2342,6 +2350,7 @@ ENTRY(aesni_ecb_enc)
    popl KEYP
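
The (FRAME_OFFSET+N)(%esp) adjustments above are purely mechanical: on 32-bit
the arguments are passed on the stack, and the frame pointer pushed by
FRAME_BEGIN shifts them by FRAME_OFFSET bytes.  As an illustration (not part
of the patch), the stack seen by aesni_enc after FRAME_BEGIN and its two
register saves looks like:

	#  0(%esp)   saved KLEN       (pushl KLEN)
	#  4(%esp)   saved KEYP       (pushl KEYP)
	#  8(%esp)   saved %ebp       (FRAME_BEGIN; FRAME_OFFSET == 4)
	# 12(%esp)   return address
	# 16(%esp)   ctx -> KEYP      i.e. (FRAME_OFFSET+12)(%esp)
	# 20(%esp)   dst -> OUTP      i.e. (FRAME_OFFSET+16)(%esp)
	# 24(%esp)   src -> INP       i.e. (FRAME_OFFSET+20)(%esp)

Without CONFIG_FRAME_POINTER, FRAME_OFFSET is 0 and the offsets reduce to the
old values, so the non-frame-pointer build is unchanged.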

[tip:x86/debug] x86/asm/crypto: Create stack frames in crypto functions

2016-02-23 Thread tip-bot for Josh Poimboeuf
Commit-ID:  c24cf96a589107cb29c1d2cfe9c42d43e3f68654
Gitweb: http://git.kernel.org/tip/c24cf96a589107cb29c1d2cfe9c42d43e3f68654
Author: Josh Poimboeuf 
AuthorDate: Thu, 21 Jan 2016 16:49:19 -0600
Committer:  Ingo Molnar 
CommitDate: Tue, 23 Feb 2016 09:03:57 +0100
