
This adds relocation of .got entries produced by -fpic. -fpic produces 2-3% smaller code and is faster. Unfortunately gcc promotes -fpic to -fPIC when -mrelocatable is used, so one needs a very small patch to gcc too (sent upstream).
Signed-off-by: Joakim Tjernlund <Joakim.Tjernlund@transmode.se>
---
 arch/powerpc/cpu/mpc83xx/start.S    | 18 ++++++++++++++++++
 arch/powerpc/cpu/mpc83xx/u-boot.lds |  1 +
 2 files changed, 19 insertions(+), 0 deletions(-)
diff --git a/arch/powerpc/cpu/mpc83xx/start.S b/arch/powerpc/cpu/mpc83xx/start.S
index c7d85a8..95ae1d8 100644
--- a/arch/powerpc/cpu/mpc83xx/start.S
+++ b/arch/powerpc/cpu/mpc83xx/start.S
@@ -69,6 +69,8 @@
 	 */
 	START_GOT
 	GOT_ENTRY(_GOT2_TABLE_)
+	GOT_ENTRY(_GOT_TABLE_)
+	GOT_ENTRY(_GLOBAL_OFFSET_TABLE_)
 	GOT_ENTRY(__bss_start)
 	GOT_ENTRY(_end)
@@ -951,6 +953,22 @@ in_ram:
 	stw	r0,0(r3)
 2:	bdnz	1b

+	lwz	r4,GOT(_GLOBAL_OFFSET_TABLE_)
+	addi	r4,r4,-4	/* don't write over blrl in GOT */
+	lwz	r3,GOT(_GOT_TABLE_)
+	subf.	r4,r3,r4	/* r4 - r3 */
+	ble	3f
+	srwi	r4,r4,2		/* r4/4 */
+	mr	r5,r11
+	mtctr	r4
+	addi	r3,r3,-4
+1:	lwzu	r0,4(r3)
+	cmpwi	r0,0
+	beq-	2f
+	add	r0,r0,r11
+	stw	r0,0(r3)
+2:	bdnz	1b
+3:
 #ifndef CONFIG_NAND_SPL
 	/*
 	 * Now adjust the fixups and the pointers to the fixups
diff --git a/arch/powerpc/cpu/mpc83xx/u-boot.lds b/arch/powerpc/cpu/mpc83xx/u-boot.lds
index 0b74a13..a498a37 100644
--- a/arch/powerpc/cpu/mpc83xx/u-boot.lds
+++ b/arch/powerpc/cpu/mpc83xx/u-boot.lds
@@ -67,6 +67,7 @@ SECTIONS
 	PROVIDE (erotext = .);
 	.reloc	 : {
+		_GOT_TABLE_ = .;
 		*(.got)
 		_GOT2_TABLE_ = .;
 		*(.got2)