From: Vladimir 'phcoder' Serbinenko
Date: Tue, 24 Aug 2010 06:57:18 +0000 (+0200)
Subject: Unify and macroify some code in x86 relocators
X-Git-Tag: 1.99~629^2~49
X-Git-Url: http://git.ipfire.org/cgi-bin/gitweb.cgi?a=commitdiff_plain;h=a797a26ee832a12c384da7c1a4404fa6ac626665;p=thirdparty%2Fgrub.git

Unify and macroify some code in x86 relocators
---

diff --git a/lib/i386/relocator16.S b/lib/i386/relocator16.S
index 510d3a1ed..c3768f4eb 100644
--- a/lib/i386/relocator16.S
+++ b/lib/i386/relocator16.S
@@ -15,18 +15,7 @@
  * You should have received a copy of the GNU General Public License
  * along with GRUB. If not, see <http://www.gnu.org/licenses/>.
  */
-
-#include
-#include
-
-#ifdef __x86_64__
-#define RAX	%rax
-#define RSI	%rsi
-#else
-#define RAX	%eax
-#define RSI	%esi
-#endif
-
+
 /* The code segment of the protected mode. */
 #define CODE_SEGMENT	0x08
 
@@ -37,50 +26,41 @@
 #define PSEUDO_REAL_DSEG	0x20
 
+#include "relocator_common.S"
+
 	.p2align	4	/* force 16-byte alignment */
 VARIABLE(grub_relocator16_start)
-LOCAL(base):
-	/* %rax contains now our new 'base'. */
-	mov	RAX, RSI
-	add	$(LOCAL(cont0) - LOCAL(base)), RAX
-	jmp	*RAX
-LOCAL(cont0):
-	lea	(LOCAL(cont1) - LOCAL(base)) (RSI, 1), RAX
-	movl	%eax, (LOCAL(jump_vector) - LOCAL(base)) (RSI, 1)
-
-	lea	(LOCAL(gdt) - LOCAL(base)) (RSI, 1), RAX
-	mov	RAX, (LOCAL(gdt_addr) - LOCAL(base)) (RSI, 1)
+	PREAMBLE
 
 	movl	%esi, %eax
 	movw	%ax, (LOCAL (cs_base_bytes12) - LOCAL (base)) (RSI, 1)
 	shrl	$16, %eax
 	movb	%al, (LOCAL (cs_base_byte3) - LOCAL (base)) (RSI, 1)
 
-	/* Switch to compatibility mode. */
-
-	lgdt	(LOCAL(gdtdesc) - LOCAL(base)) (RSI, 1)
-
-	/* Update %cs. */
-	ljmp	*(LOCAL(jump_vector) - LOCAL(base)) (RSI, 1)
-
-LOCAL(cont1):
+	RELOAD_GDT
 	.code32
+	/* Update other registers. */
+	movl	$DATA_SEGMENT, %eax
+	movl	%eax, %ds
+	movl	%eax, %es
+	movl	%eax, %fs
+	movl	%eax, %gs
+	movl	%eax, %ss
 
-	/* Disable paging. */
-	movl	%cr0, %eax
-	andl	$(~GRUB_MEMORY_CPU_CR0_PAGING_ON), %eax
-	movl	%eax, %cr0
+	DISABLE_PAGING
 
+#ifdef __x86_64__
 	/* Disable amd64. */
 	movl	$GRUB_MEMORY_CPU_AMD64_MSR, %ecx
 	rdmsr
 	andl	$(~GRUB_MEMORY_CPU_AMD64_MSR_ON), %eax
 	wrmsr
+#endif
 
 	/* Turn off PAE. */
 	movl	%cr4, %eax
-	andl	$GRUB_MEMORY_CPU_CR4_PAE_ON, %eax
+	andl	$(~GRUB_MEMORY_CPU_CR4_PAE_ON), %eax
 	movl	%eax, %cr4
 
 	/* Update other registers. */
@@ -208,23 +188,6 @@ LOCAL(cs_base_byte3):
 	 */
 	.word	0xFFFF, 0
 	.byte	0, 0x92, 0, 0
-
-	.p2align	4
-LOCAL(gdtdesc):
-	.word	0x27
-LOCAL(gdt_addr):
-#ifdef __x86_64__
-	/* Filled by the code. */
-	.quad	0
-#else
-	/* Filled by the code. */
-	.long	0
-#endif
-
-	.p2align	4
-LOCAL(jump_vector):
-	/* Jump location. Is filled by the code */
-	.long	0
-	.long	CODE_SEGMENT
+LOCAL(gdt_end):
 
 VARIABLE(grub_relocator16_end)
diff --git a/lib/i386/relocator32.S b/lib/i386/relocator32.S
index 4f79151e2..b581305a5 100644
--- a/lib/i386/relocator32.S
+++ b/lib/i386/relocator32.S
@@ -16,48 +16,21 @@
  * along with GRUB. If not, see <http://www.gnu.org/licenses/>.
  */
 
-#include
-#include
-
-#ifdef __x86_64__
-#define RAX	%rax
-#define RSI	%rsi
-#else
-#define RAX	%eax
-#define RSI	%esi
-#endif
-
 /* The code segment of the protected mode. */
 #define CODE_SEGMENT	0x10
 
 /* The data segment of the protected mode. */
 #define DATA_SEGMENT	0x18
 
+#include "relocator_common.S"
+
 	.p2align	4	/* force 16-byte alignment */
 VARIABLE(grub_relocator32_start)
-LOCAL(base):
-	/* %rax contains now our new 'base'. */
-	mov	RAX, RSI
-	add	$(LOCAL(cont0) - LOCAL(base)), RAX
-	jmp	*RAX
-LOCAL(cont0):
-	lea	(LOCAL(cont1) - LOCAL(base)) (RSI, 1), RAX
-	movl	%eax, (LOCAL(jump_vector) - LOCAL(base)) (RSI, 1)
+	PREAMBLE
 
-	lea	(LOCAL(gdt) - LOCAL(base)) (RSI, 1), RAX
-	mov	RAX, (LOCAL(gdt_addr) - LOCAL(base)) (RSI, 1)
-
-	/* Switch to compatibility mode. */
-
-	lgdt	(LOCAL(gdtdesc) - LOCAL(base)) (RSI, 1)
-
-	/* Update %cs. */
-	ljmp	*(LOCAL(jump_vector) - LOCAL(base)) (RSI, 1)
-
-LOCAL(cont1):
+	RELOAD_GDT
 	.code32
-
 	/* Update other registers. */
 	movl	$DATA_SEGMENT, %eax
 	movl	%eax, %ds
@@ -66,16 +39,15 @@ LOCAL(cont1):
 	movl	%eax, %gs
 	movl	%eax, %ss
 
-	/* Disable paging. */
-	movl	%cr0, %eax
-	andl	$(~GRUB_MEMORY_CPU_CR0_PAGING_ON), %eax
-	movl	%eax, %cr0
+	DISABLE_PAGING
 
+#ifdef __x86_64__
 	/* Disable amd64. */
 	movl	$GRUB_MEMORY_CPU_AMD64_MSR, %ecx
 	rdmsr
 	andl	$(~GRUB_MEMORY_CPU_AMD64_MSR_ON), %eax
 	wrmsr
+#endif
 
 	/* Turn off PAE. */
 	movl	%cr4, %eax
@@ -143,23 +115,6 @@ LOCAL(gdt):
 
 	/* Data segment. */
 	.byte	0xFF, 0xFF, 0x00, 0x00, 0x00, 0x92, 0xCF, 0x00
-
-	.p2align	4
-LOCAL(gdtdesc):
-	.word	0x27
-LOCAL(gdt_addr):
-#ifdef __x86_64__
-	/* Filled by the code. */
-	.quad	0
-#else
-	/* Filled by the code. */
-	.long	0
-#endif
-
-	.p2align	4
-LOCAL(jump_vector):
-	/* Jump location. Is filled by the code */
-	.long	0
-	.long	CODE_SEGMENT
+LOCAL(gdt_end):
 
 VARIABLE(grub_relocator32_end)
diff --git a/lib/i386/relocator64.S b/lib/i386/relocator64.S
index 37a77b3b5..bb086418c 100644
--- a/lib/i386/relocator64.S
+++ b/lib/i386/relocator64.S
@@ -16,44 +16,20 @@
  * along with GRUB. If not, see <http://www.gnu.org/licenses/>.
  */
 
-#include
-#include
-
-#ifdef __x86_64__
-#define RAX	%rax
-#define RSI	%rsi
-#else
-#define RAX	%eax
-#define RSI	%esi
-#endif
-
 #define CODE32_SEGMENT	0x18
-#define CODE64_SEGMENT	0x08
+#define CODE_SEGMENT	0x08
 
 /* The data segment of the protected mode. */
 #define DATA_SEGMENT	0x10
 
+#include "relocator_common.S"
+
 	.p2align	4	/* force 16-byte alignment */
 VARIABLE(grub_relocator64_start)
-LOCAL(base):
-	/* %rax contains now our new 'base'. */
-	mov	RAX, RSI
-
-	add	$(LOCAL(cont0) - LOCAL(base)), RAX
-	jmp	*RAX
-LOCAL(cont0):
+	PREAMBLE
 
 #ifndef __x86_64__
-	lea	(LOCAL(cont1) - LOCAL(base)) (RSI, 1), RAX
-	mov	RAX, (LOCAL(jump_vector) - LOCAL(base)) (RSI, 1)
-
-	lea	(LOCAL(gdt) - LOCAL(base)) (RSI, 1), RAX
-	mov	RAX, (LOCAL(gdt_addr) - LOCAL(base)) (RSI, 1)
-
-	/* Disable paging. */
-	movl	%cr0, %eax
-	andl	$(~GRUB_MEMORY_CPU_CR0_PAGING_ON), %eax
-	movl	%eax, %cr0
+	DISABLE_PAGING
 
 	/* Turn on PAE. */
 	movl	%cr4, %eax
@@ -77,11 +53,7 @@ VARIABLE(grub_relocator64_cr3)
 	orl	$GRUB_MEMORY_CPU_CR0_PAGING_ON, %eax
 	movl	%eax, %cr0
 
-	/* Load GDT. */
-	lgdt	(LOCAL(gdtdesc) - LOCAL(base)) (RSI, 1)
-
-	/* Update %cs. */
-	ljmp	*(LOCAL(jump_vector) - LOCAL(base)) (RSI, 1)
+	RELOAD_GDT
 #else
 	/* mov imm64, %rax */
 	.byte	0x48
@@ -91,7 +63,6 @@ VARIABLE(grub_relocator64_cr3)
 	movq	%rax, %cr3
 #endif
 
-LOCAL(cont1):
 	.code64
 
 	/* mov imm64, %rax */
@@ -183,18 +154,7 @@ LOCAL(gdt):
 	    | (1 << 7)	/* 4K granular. */)
 	.byte	0x00	/* Base 00xxxxxx. */
 
-	.p2align	4
-LOCAL(gdtdesc):
-	.word	0x20
-LOCAL(gdt_addr):
-	/* Filled by the code. */
-	.long	0
-
-	.p2align	4
-LOCAL(jump_vector):
-	/* Jump location. Is filled by the code */
-	.long	0
-	.long	CODE64_SEGMENT
+LOCAL(gdt_end):
 #endif
 
 VARIABLE(grub_relocator64_end)
diff --git a/lib/i386/relocator_common.S b/lib/i386/relocator_common.S
new file mode 100644
index 000000000..bd5b53f95
--- /dev/null
+++ b/lib/i386/relocator_common.S
@@ -0,0 +1,82 @@
+/*
+ * GRUB -- GRand Unified Bootloader
+ * Copyright (C) 2009,2010 Free Software Foundation, Inc.
+ *
+ * GRUB is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * GRUB is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with GRUB. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+
+#include
+#include
+
+#ifdef __x86_64__
+#define RAX	%rax
+#define RSI	%rsi
+#else
+#define RAX	%eax
+#define RSI	%esi
+#endif
+
+	.macro DISABLE_PAGING
+#ifdef GRUB_MACHINE_IEEE1275
+#endif
+
+	movl	%cr0, %eax
+	andl	$(~GRUB_MEMORY_CPU_CR0_PAGING_ON), %eax
+	movl	%eax, %cr0
+	.endm
+
+	.macro PREAMBLE
+LOCAL(base):
+	/* %rax contains now our new 'base'. */
+	mov	RAX, RSI
+
+	add	$(LOCAL(cont0) - LOCAL(base)), RAX
+	jmp	*RAX
+LOCAL(cont0):
+	.endm
+
+	.macro RELOAD_GDT
+	lea	(LOCAL(cont1) - LOCAL(base)) (RSI, 1), RAX
+	movl	%eax, (LOCAL(jump_vector) - LOCAL(base)) (RSI, 1)
+
+	lea	(LOCAL(gdt) - LOCAL(base)) (RSI, 1), RAX
+	mov	RAX, (LOCAL(gdt_addr) - LOCAL(base)) (RSI, 1)
+
+	/* Switch to compatibility mode. */
+	lgdt	(LOCAL(gdtdesc) - LOCAL(base)) (RSI, 1)
+
+	/* Update %cs. */
+	ljmp	*(LOCAL(jump_vector) - LOCAL(base)) (RSI, 1)
+
+	.p2align	4
+LOCAL(gdtdesc):
+	.word	LOCAL(gdt_end) - LOCAL(gdt)
+LOCAL(gdt_addr):
+#ifdef __x86_64__
+	/* Filled by the code. */
+	.quad	0
+#else
+	/* Filled by the code. */
+	.long	0
+#endif
+
+	.p2align	4
+LOCAL(jump_vector):
+	/* Jump location. Is filled by the code */
+	.long	0
+	.long	CODE_SEGMENT
+
+LOCAL(cont1):
+	.endm