Previously we misaligned the stack by 8 in startup.S and compensated
for it in callwrap.S. According to the ABI docs (EFI and SysV amd64), the
right behaviour is to align the stack in startup.S and keep it aligned
in callwrap.S. The startup.S part was committed a few commits earlier.
This commit takes care of callwrap.S.
Reported by: Gary Lin.
+2013-11-22 Vladimir Serbinenko <phcoder@gmail.com>
+
+ * grub-core/kern/x86_64/efi/callwrap.S: Fix stack alignment.
+ Previously we misaligned the stack by 8 in startup.S and compensated
+ for it in callwrap.S. According to the ABI docs (EFI and SysV amd64),
+ the right behaviour is to align the stack in startup.S and keep it
+ aligned in callwrap.S. The startup.S part was committed a few commits
+ earlier. This commit takes care of callwrap.S.
+ Reported by: Gary Lin.
+
2013-11-22 Vladimir Serbinenko <phcoder@gmail.com>
* grub-core/boot/mips/startup_raw.S: Handle the case of gap between
.text
FUNCTION(efi_wrap_0)
- subq $48, %rsp
+ subq $40, %rsp /* entry rsp%16==8 (SysV); 40+8(ret addr)=48 keeps rsp 16-aligned at the call, with 32 bytes of MS x64 shadow space */
call *%rdi
- addq $48, %rsp
+ addq $40, %rsp
ret
FUNCTION(efi_wrap_1)
- subq $48, %rsp
+ subq $40, %rsp /* 32-byte shadow space; 40+8(ret addr) keeps rsp 16-aligned at the call */
mov %rsi, %rcx
call *%rdi
- addq $48, %rsp
+ addq $40, %rsp
ret
FUNCTION(efi_wrap_2)
- subq $48, %rsp
+ subq $40, %rsp /* 32-byte shadow space; 40+8(ret addr) keeps rsp 16-aligned at the call */
mov %rsi, %rcx
call *%rdi
- addq $48, %rsp
+ addq $40, %rsp
ret
FUNCTION(efi_wrap_3)
- subq $48, %rsp
+ subq $40, %rsp /* 32-byte shadow space; 40+8(ret addr) keeps rsp 16-aligned at the call */
mov %rcx, %r8
mov %rsi, %rcx
call *%rdi
- addq $48, %rsp
+ addq $40, %rsp
ret
FUNCTION(efi_wrap_4)
- subq $48, %rsp
+ subq $40, %rsp /* 32-byte shadow space; 40+8(ret addr) keeps rsp 16-aligned at the call */
mov %r8, %r9
mov %rcx, %r8
mov %rsi, %rcx
call *%rdi
- addq $48, %rsp
+ addq $40, %rsp
ret
FUNCTION(efi_wrap_5)
- subq $48, %rsp
+ subq $40, %rsp /* 32-byte shadow space + 8 bytes for the 5th arg at 32(%rsp); 40+8(ret addr) keeps 16-byte alignment */
mov %r9, 32(%rsp)
mov %r8, %r9
mov %rcx, %r8
mov %rsi, %rcx
call *%rdi
- addq $48, %rsp
+ addq $40, %rsp
ret
FUNCTION(efi_wrap_6)
- subq $64, %rsp
- mov 64+8(%rsp), %rax
+ subq $56, %rsp /* 32-byte shadow + stack args at 32/40, padded so 56+8(ret addr) keeps 16-byte alignment */
+ mov 56+8(%rsp), %rax /* 6th arg: first SysV stack arg, just above our frame and the return address */
mov %rax, 40(%rsp)
mov %r9, 32(%rsp)
mov %r8, %r9
mov %rcx, %r8
mov %rsi, %rcx
call *%rdi
- addq $64, %rsp
+ addq $56, %rsp
ret
FUNCTION(efi_wrap_7)
- subq $96, %rsp
- mov 96+16(%rsp), %rax
+ subq $88, %rsp /* 32-byte shadow + stack args at 32/40/48, padded so 88+8(ret addr) keeps 16-byte alignment */
+ mov 88+16(%rsp), %rax /* 7th arg: second SysV stack arg of the caller */
mov %rax, 48(%rsp)
- mov 96+8(%rsp), %rax
+ mov 88+8(%rsp), %rax /* 6th arg: first SysV stack arg of the caller */
mov %rax, 40(%rsp)
mov %r9, 32(%rsp)
mov %r8, %r9
mov %rcx, %r8
mov %rsi, %rcx
call *%rdi
- addq $96, %rsp
+ addq $88, %rsp
ret
FUNCTION(efi_wrap_10)
- subq $96, %rsp
- mov 96+40(%rsp), %rax
+ subq $88, %rsp /* 32-byte shadow + stack args at 32..72; 88+8(ret addr) keeps 16-byte alignment */
+ mov 88+40(%rsp), %rax /* 10th arg: fifth SysV stack arg of the caller */
mov %rax, 72(%rsp)
- mov 96+32(%rsp), %rax
+ mov 88+32(%rsp), %rax /* 9th arg */
mov %rax, 64(%rsp)
- mov 96+24(%rsp), %rax
+ mov 88+24(%rsp), %rax /* 8th arg */
mov %rax, 56(%rsp)
- mov 96+16(%rsp), %rax
+ mov 88+16(%rsp), %rax /* 7th arg */
mov %rax, 48(%rsp)
- mov 96+8(%rsp), %rax
+ mov 88+8(%rsp), %rax /* 6th arg: first SysV stack arg of the caller */
mov %rax, 40(%rsp)
mov %r9, 32(%rsp)
mov %r8, %r9
mov %rcx, %r8
mov %rsi, %rcx
call *%rdi
- addq $96, %rsp
+ addq $88, %rsp
ret