
From: Hongbo Zhang <hongbo.zhang@nxp.com>
This patch adds secure_text, secure_data and secure_stack sections to the ARMv8 linker script to hold the PSCI code and data, following the existing ARMv7 implementation.
Signed-off-by: Hongbo Zhang <hongbo.zhang@nxp.com>
---
 arch/arm/config.mk            |  3 +-
 arch/arm/cpu/armv8/u-boot.lds | 65 +++++++++++++++++++++++++++++++++++++++++++
 2 files changed, 67 insertions(+), 1 deletion(-)
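The linker script changes below only collect the ._secure.text and ._secure.data
input sections; PSCI code still has to opt into them with the section attributes
from asm/secure.h, the same way the ARMv7 PSCI code does. A minimal sketch for
illustration only (psci_dummy_handler is a placeholder name, not something this
patch adds):

#include <asm/secure.h>		/* provides the __secure section attribute */

/*
 * A function marked __secure is emitted into ._secure.text, which the
 * linker script gathers into .secure_text at CONFIG_ARMV8_SECURE_BASE.
 */
void __secure psci_dummy_handler(void)
{
	/* placeholder body for illustration only */
}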
diff --git a/arch/arm/config.mk b/arch/arm/config.mk
index 8f85862..a3d46ea 100644
--- a/arch/arm/config.mk
+++ b/arch/arm/config.mk
@@ -118,7 +118,8 @@ endif
 # limit ourselves to the sections we want in the .bin.
 ifdef CONFIG_ARM64
-OBJCOPYFLAGS += -j .text -j .rodata -j .data -j .u_boot_list -j .rela.dyn
+OBJCOPYFLAGS += -j .text -j .secure_text -j .secure_data -j .rodata -j .data \
+		-j .u_boot_list -j .rela.dyn
 else
 OBJCOPYFLAGS += -j .text -j .secure_text -j .secure_data -j .rodata -j .hash \
 		-j .data -j .got -j .got.plt -j .u_boot_list -j .rel.dyn
 endif
diff --git a/arch/arm/cpu/armv8/u-boot.lds b/arch/arm/cpu/armv8/u-boot.lds
index fd15ad5..464eccf 100644
--- a/arch/arm/cpu/armv8/u-boot.lds
+++ b/arch/arm/cpu/armv8/u-boot.lds
@@ -8,11 +8,17 @@
  * SPDX-License-Identifier:	GPL-2.0+
  */
 
+#include <config.h>
+#include <asm/psci.h>
+
 OUTPUT_FORMAT("elf64-littleaarch64", "elf64-littleaarch64", "elf64-littleaarch64")
 OUTPUT_ARCH(aarch64)
 ENTRY(_start)
 SECTIONS
 {
+#if defined(CONFIG_ARMV8_SECURE_BASE)
+	/DISCARD/ : { *(.rela._secure*) }
+#endif
 	. = 0x00000000;
 
 	. = ALIGN(8);
@@ -23,6 +29,65 @@ SECTIONS
 		*(.text*)
 	}
 
+#ifdef CONFIG_ARMV8_PSCI
+	.__secure_start :
+#ifndef CONFIG_ARMV8_SECURE_BASE
+		ALIGN(CONSTANT(COMMONPAGESIZE))
+#endif
+	{
+		KEEP(*(.__secure_start))
+	}
+
+#ifndef CONFIG_ARMV8_SECURE_BASE
+#define CONFIG_ARMV8_SECURE_BASE
+#define __ARMV8_PSCI_STACK_IN_RAM
+#endif
+	.secure_text CONFIG_ARMV8_SECURE_BASE :
+		AT(ADDR(.__secure_start) + SIZEOF(.__secure_start))
+	{
+		*(._secure.text)
+	}
+
+	.secure_data : AT(LOADADDR(.secure_text) + SIZEOF(.secure_text))
+	{
+		*(._secure.data)
+	}
+
+	.secure_stack ALIGN(ADDR(.secure_data) + SIZEOF(.secure_data),
+			    CONSTANT(COMMONPAGESIZE)) (NOLOAD) :
+#ifdef __ARMV8_PSCI_STACK_IN_RAM
+		AT(ADDR(.secure_stack))
+#else
+		AT(LOADADDR(.secure_data) + SIZEOF(.secure_data))
+#endif
+	{
+		KEEP(*(.__secure_stack_start))
+
+#ifdef CONFIG_ARMV8_PSCI_NR_CPUS
+		. = . + CONFIG_ARMV8_PSCI_NR_CPUS * ARM_PSCI_STACK_SIZE;
+#else
+		. = . + 4 * ARM_PSCI_STACK_SIZE;
+#endif
+		. = ALIGN(CONSTANT(COMMONPAGESIZE));
+
+		KEEP(*(.__secure_stack_end))
+
+#ifdef CONFIG_ARMV8_SECURE_MAX_SIZE
+		ASSERT((. - ADDR(.secure_text)) <= CONFIG_ARMV8_SECURE_MAX_SIZE,
+		       "Error: secure section exceeds secure memory size");
+#endif
+	}
+
+#ifndef __ARMV8_PSCI_STACK_IN_RAM
+	. = LOADADDR(.secure_stack);
+#endif
+
+	.__secure_end : AT(ADDR(.__secure_end)) {
+		KEEP(*(.__secure_end))
+		LONG(0x1d1071c);	/* Must output something to reset LMA */
+	}
+#endif
+
 	. = ALIGN(8);
 	.rodata : { *(SORT_BY_ALIGNMENT(SORT_BY_NAME(.rodata*))) }
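
The KEEP(*(.__secure_start)), KEEP(*(.__secure_stack_start)),
KEEP(*(.__secure_stack_end)) and KEEP(*(.__secure_end)) entries above only
consume boundary markers; they rely on zero-size arrays being emitted into
those input sections from C, as the ARMv7 port already provides (see
arch/arm/lib/sections.c). For illustration, assuming the same convention is
reused unchanged, such markers look like:

/* Zero-size arrays that only pin symbols at the secure section boundaries. */
char __secure_start[0] __attribute__((section(".__secure_start")));
char __secure_end[0] __attribute__((section(".__secure_end")));
char __secure_stack_start[0] __attribute__((section(".__secure_stack_start")));
char __secure_stack_end[0] __attribute__((section(".__secure_stack_end")));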