To make future changes less error prone, and to slightly simplify a
possible future conversion to a relocatable trampoline even for the
multiboot path (pretty desirable given that we had to change the
trampoline base a number of times to escape collisions with firmware
placed data),
- remove final uses of bootsym_phys() from trampoline.S, allowing the
  symbol to be undefined before including this file (to make sure no
  new references get added)
- replace two easy to deal with uses of bootsym_phys() in head.S
- remove an easy to replace reference to BOOT_TRAMPOLINE

Signed-off-by: Jan Beulich

--- a/xen/arch/x86/boot/head.S
+++ b/xen/arch/x86/boot/head.S
@@ -202,11 +202,11 @@ __start:
 
         /* Copy bootstrap trampoline to low memory, below 1MB. */
         mov     $sym_phys(trampoline_start),%esi
-        mov     $bootsym_phys(trampoline_start),%edi
+        mov     %edx,%edi
         mov     $trampoline_end - trampoline_start,%ecx
         rep movsb
 
-        mov     $bootsym_phys(early_stack),%esp
+        lea     early_stack-trampoline_start(%edx),%esp
         call    cmdline_parse_early
 
         /* Jump into the relocated trampoline. */
@@ -214,6 +214,8 @@ __start:
 
 #include "cmdline.S"
 
+#undef bootsym_phys
+
 reloc:
 #include "reloc.S"
 
--- a/xen/arch/x86/boot/trampoline.S
+++ b/xen/arch/x86/boot/trampoline.S
@@ -132,7 +132,7 @@ high_start:
 
         .code32
 trampoline_boot_cpu_entry:
-        cmpb    $0,bootsym_phys(skip_realmode)
+        cmpb    $0,bootsym_rel(skip_realmode,5)
         jnz     .Lskip_realmode
 
         /* Load pseudo-real-mode segments. */
@@ -152,7 +152,7 @@ trampoline_boot_cpu_entry:
 
         /* Load proper real-mode values into %cs, %ds, %es and %ss. */
         ljmp    $(BOOT_TRAMPOLINE>>4),$bootsym(1f)
-1:      mov     $(BOOT_TRAMPOLINE>>4),%ax
+1:      mov     %cs,%ax
         mov     %ax,%ds
         mov     %ax,%es
         mov     %ax,%ss
@@ -195,7 +195,7 @@ trampoline_boot_cpu_entry:
         lmsw    %ax             # CR0.PE = 1 (enter protected mode)
 
         /* Load proper protected-mode values into all segment registers. */
-        ljmpl   $BOOT_CS32,$bootsym_phys(1f)
+        ljmpl   $BOOT_CS32,$bootsym_rel(1f,6)
         .code32
 1:      mov     $BOOT_DS,%eax
         mov     %eax,%ds