diff --git a/Makefile b/Makefile
index 03f9fc6..a84c413 100644
--- a/Makefile
+++ b/Makefile
@@ -412,40 +412,45 @@
 ################################################################################

 ifneq (${SPD},none)
-ifeq (${ARCH},aarch32)
+    ifeq (${ARCH},aarch32)
        $(error "Error: SPD is incompatible with AArch32.")
-endif
-ifdef EL3_PAYLOAD_BASE
+    endif
+
+    ifdef EL3_PAYLOAD_BASE
        $(warning "SPD and EL3_PAYLOAD_BASE are incompatible build options.")
        $(warning "The SPD and its BL32 companion will be present but ignored.")
-endif
-       ifeq (${SPD},spmd)
-       # SPMD is located in std_svc directory
-               SPD_DIR := std_svc
-       else
-       # All other SPDs in spd directory
-               SPD_DIR := spd
-       endif
+    endif

-       # We expect to locate an spd.mk under the specified SPD directory
-       SPD_MAKE := $(wildcard services/${SPD_DIR}/${SPD}/${SPD}.mk)
+    ifeq (${SPD},spmd)
+        # SPMD is located in std_svc directory
+        SPD_DIR := std_svc
-
-       ifeq (${SPD_MAKE},)
-               $(error Error: No services/${SPD_DIR}/${SPD}/${SPD}.mk located)
+        ifeq ($(CTX_INCLUDE_EL2_REGS),0)
+            $(error spmd requires CTX_INCLUDE_EL2_REGS option)
        endif
-       $(info Including ${SPD_MAKE})
-       include ${SPD_MAKE}
+    else
+        # All other SPDs in spd directory
+        SPD_DIR := spd
+    endif

-       # If there's BL32 companion for the chosen SPD, we expect that the SPD's
-       # Makefile would set NEED_BL32 to "yes". In this case, the build system
-       # supports two mutually exclusive options:
-       # * BL32 is built from source: then BL32_SOURCES must contain the list
-       #   of source files to build BL32
-       # * BL32 is a prebuilt binary: then BL32 must point to the image file
-       #   that will be included in the FIP
-       # If both BL32_SOURCES and BL32 are defined, the binary takes precedence
-       #   over the sources.
+    # We expect to locate an spd.mk under the specified SPD directory
+    SPD_MAKE := $(wildcard services/${SPD_DIR}/${SPD}/${SPD}.mk)
+
+    ifeq (${SPD_MAKE},)
+        $(error Error: No services/${SPD_DIR}/${SPD}/${SPD}.mk located)
+    endif
+    $(info Including ${SPD_MAKE})
+    include ${SPD_MAKE}
+
+    # If there's BL32 companion for the chosen SPD, we expect that the SPD's
+    # Makefile would set NEED_BL32 to "yes". In this case, the build system
+    # supports two mutually exclusive options:
+    # * BL32 is built from source: then BL32_SOURCES must contain the list
+    #   of source files to build BL32
+    # * BL32 is a prebuilt binary: then BL32 must point to the image file
+    #   that will be included in the FIP
+    # If both BL32_SOURCES and BL32 are defined, the binary takes precedence
+    #   over the sources.
 endif

 ################################################################################
@@ -761,6 +766,7 @@
 $(eval $(call assert_boolean,CTX_INCLUDE_FPREGS))
 $(eval $(call assert_boolean,CTX_INCLUDE_PAUTH_REGS))
 $(eval $(call assert_boolean,CTX_INCLUDE_MTE_REGS))
+$(eval $(call assert_boolean,CTX_INCLUDE_EL2_REGS))
 $(eval $(call assert_boolean,DEBUG))
 $(eval $(call assert_boolean,DYN_DISABLE_AUTH))
 $(eval $(call assert_boolean,EL3_EXCEPTION_HANDLING))
@@ -832,6 +838,7 @@
 $(eval $(call add_define,CTX_INCLUDE_PAUTH_REGS))
 $(eval $(call add_define,EL3_EXCEPTION_HANDLING))
 $(eval $(call add_define,CTX_INCLUDE_MTE_REGS))
+$(eval $(call add_define,CTX_INCLUDE_EL2_REGS))
 $(eval $(call add_define,ENABLE_AMU))
 $(eval $(call add_define,ENABLE_ASSERTIONS))
 $(eval $(call add_define,ENABLE_BTI))
diff --git a/include/arch/aarch64/arch.h b/include/arch/aarch64/arch.h
index 1faddbe..d593997 100644
--- a/include/arch/aarch64/arch.h
+++ b/include/arch/aarch64/arch.h
@@ -97,6 +97,33 @@
 #define ICC_SGI0R_EL1		S3_0_c12_c11_7

 /*******************************************************************************
+ * Definitions for EL2 system registers for save/restore routine
+ ******************************************************************************/
+
+#define CNTPOFF_EL2		S3_4_C14_C0_6
+#define HAFGRTR_EL2		S3_4_C3_C1_6
+#define HDFGRTR_EL2		S3_4_C3_C1_4
+#define HDFGWTR_EL2		S3_4_C3_C1_5
+#define HFGITR_EL2		S3_4_C1_C1_6
+#define HFGRTR_EL2		S3_4_C1_C1_4
+#define HFGWTR_EL2		S3_4_C1_C1_5
+#define ICH_EISR_EL2		S3_4_C12_C11_3
+#define ICH_ELRSR_EL2		S3_4_C12_C11_5
+#define ICH_HCR_EL2		S3_4_C12_C11_0
+#define ICH_MISR_EL2		S3_4_C12_C11_2
+#define ICH_VMCR_EL2		S3_4_C12_C11_7
+#define ICH_VTR_EL2		S3_4_C12_C11_1
+#define MPAMVPM0_EL2		S3_4_C10_C5_0
+#define MPAMVPM1_EL2		S3_4_C10_C5_1
+#define MPAMVPM2_EL2		S3_4_C10_C5_2
+#define MPAMVPM3_EL2		S3_4_C10_C5_3
+#define MPAMVPM4_EL2		S3_4_C10_C5_4
+#define MPAMVPM5_EL2		S3_4_C10_C5_5
+#define MPAMVPM6_EL2		S3_4_C10_C5_6
+#define MPAMVPM7_EL2		S3_4_C10_C5_7
+#define MPAMVPMV_EL2		S3_4_C10_C4_1
+
+/*******************************************************************************
  * Generic timer memory mapped registers & offsets
  ******************************************************************************/
 #define CNTCR_OFF			U(0x000)
diff --git a/include/lib/el3_runtime/aarch64/context.h b/include/lib/el3_runtime/aarch64/context.h
index 4158c02..6559b60 100644
--- a/include/lib/el3_runtime/aarch64/context.h
+++ b/include/lib/el3_runtime/aarch64/context.h
@@ -136,9 +136,87 @@
 #endif /* CTX_INCLUDE_MTE_REGS */

 /*
+ * S-EL2 register set
+ */
+
+#if CTX_INCLUDE_EL2_REGS
+/* For later discussion
+ * ICH_AP0R_EL2
+ * ICH_AP1R_EL2
+ * AMEVCNTVOFF0_EL2
+ * AMEVCNTVOFF1_EL2
+ * ICH_LR_EL2
+ */
+#define CTX_ACTLR_EL2		(CTX_MTE_REGS_END + U(0x0))
+#define CTX_AFSR0_EL2		(CTX_MTE_REGS_END + U(0x8))
+#define CTX_AFSR1_EL2		(CTX_MTE_REGS_END + U(0x10))
+#define CTX_AMAIR_EL2		(CTX_MTE_REGS_END + U(0x18))
+#define CTX_CNTHCTL_EL2		(CTX_MTE_REGS_END + U(0x20))
+#define CTX_CNTHP_CTL_EL2	(CTX_MTE_REGS_END + U(0x28))
+#define CTX_CNTHP_CVAL_EL2	(CTX_MTE_REGS_END + U(0x30))
+#define CTX_CNTHP_TVAL_EL2	(CTX_MTE_REGS_END + U(0x38))
+#define CTX_CNTPOFF_EL2		(CTX_MTE_REGS_END + U(0x40))
+#define CTX_CNTVOFF_EL2		(CTX_MTE_REGS_END + U(0x48))
+#define CTX_CPTR_EL2		(CTX_MTE_REGS_END + U(0x50))
+#define CTX_DBGVCR32_EL2	(CTX_MTE_REGS_END + U(0x58))
+#define CTX_ELR_EL2		(CTX_MTE_REGS_END + U(0x60))
+#define CTX_ESR_EL2		(CTX_MTE_REGS_END + U(0x68))
+#define CTX_FAR_EL2		(CTX_MTE_REGS_END + U(0x70))
+#define CTX_FPEXC32_EL2		(CTX_MTE_REGS_END + U(0x78))
+#define CTX_HACR_EL2		(CTX_MTE_REGS_END + U(0x80))
+#define CTX_HAFGRTR_EL2		(CTX_MTE_REGS_END + U(0x88))
+#define CTX_HCR_EL2		(CTX_MTE_REGS_END + U(0x90))
+#define CTX_HDFGRTR_EL2		(CTX_MTE_REGS_END + U(0x98))
+#define CTX_HDFGWTR_EL2		(CTX_MTE_REGS_END + U(0xA0))
+#define CTX_HFGITR_EL2		(CTX_MTE_REGS_END + U(0xA8))
+#define CTX_HFGRTR_EL2		(CTX_MTE_REGS_END + U(0xB0))
+#define CTX_HFGWTR_EL2		(CTX_MTE_REGS_END + U(0xB8))
+#define CTX_HPFAR_EL2		(CTX_MTE_REGS_END + U(0xC0))
+#define CTX_HSTR_EL2		(CTX_MTE_REGS_END + U(0xC8))
+#define CTX_ICC_SRE_EL2		(CTX_MTE_REGS_END + U(0xD0))
+#define CTX_ICH_EISR_EL2	(CTX_MTE_REGS_END + U(0xD8))
+#define CTX_ICH_ELRSR_EL2	(CTX_MTE_REGS_END + U(0xE0))
+#define CTX_ICH_HCR_EL2		(CTX_MTE_REGS_END + U(0xE8))
+#define CTX_ICH_MISR_EL2	(CTX_MTE_REGS_END + U(0xF0))
+#define CTX_ICH_VMCR_EL2	(CTX_MTE_REGS_END + U(0xF8))
+#define CTX_ICH_VTR_EL2		(CTX_MTE_REGS_END + U(0x100))
+#define CTX_MAIR_EL2		(CTX_MTE_REGS_END + U(0x108))
+#define CTX_MDCR_EL2		(CTX_MTE_REGS_END + U(0x110))
+#define CTX_MPAM2_EL2		(CTX_MTE_REGS_END + U(0x118))
+#define CTX_MPAMHCR_EL2		(CTX_MTE_REGS_END + U(0x120))
+#define CTX_MPAMVPM0_EL2	(CTX_MTE_REGS_END + U(0x128))
+#define CTX_MPAMVPM1_EL2	(CTX_MTE_REGS_END + U(0x130))
+#define CTX_MPAMVPM2_EL2	(CTX_MTE_REGS_END + U(0x138))
+#define CTX_MPAMVPM3_EL2	(CTX_MTE_REGS_END + U(0x140))
+#define CTX_MPAMVPM4_EL2	(CTX_MTE_REGS_END + U(0x148))
+#define CTX_MPAMVPM5_EL2	(CTX_MTE_REGS_END + U(0x150))
+#define CTX_MPAMVPM6_EL2	(CTX_MTE_REGS_END + U(0x158))
+#define CTX_MPAMVPM7_EL2	(CTX_MTE_REGS_END + U(0x160))
+#define CTX_MPAMVPMV_EL2	(CTX_MTE_REGS_END + U(0x168))
+#define CTX_RMR_EL2		(CTX_MTE_REGS_END + U(0x170))
+#define CTX_SCTLR_EL2		(CTX_MTE_REGS_END + U(0x178))
+#define CTX_SPSR_EL2		(CTX_MTE_REGS_END + U(0x180))
+#define CTX_SP_EL2		(CTX_MTE_REGS_END + U(0x188))
+#define CTX_TCR_EL2		(CTX_MTE_REGS_END + U(0x190))
+#define CTX_TPIDR_EL2		(CTX_MTE_REGS_END + U(0x198))
+#define CTX_TTBR0_EL2		(CTX_MTE_REGS_END + U(0x1A0))
+#define CTX_VBAR_EL2		(CTX_MTE_REGS_END + U(0x1A8))
+#define CTX_VMPIDR_EL2		(CTX_MTE_REGS_END + U(0x1B0))
+#define CTX_VPIDR_EL2		(CTX_MTE_REGS_END + U(0x1B8))
+#define CTX_VTCR_EL2		(CTX_MTE_REGS_END + U(0x1C0))
+#define CTX_VTTBR_EL2		(CTX_MTE_REGS_END + U(0x1C8))
+#define CTX_ZCR_EL2		(CTX_MTE_REGS_END + U(0x1D0))
+
+/* Align to the next 16 byte boundary */
+#define CTX_EL2_REGS_END	(CTX_MTE_REGS_END + U(0x1E0))
+#else
+#define CTX_EL2_REGS_END	CTX_MTE_REGS_END
+#endif /* CTX_INCLUDE_EL2_REGS */
+
+/*
  * End of system registers.
  */
-#define CTX_SYSREGS_END		CTX_MTE_REGS_END
+#define CTX_SYSREGS_END		CTX_EL2_REGS_END

 /*******************************************************************************
  * Constants that allow assembler code to access members of and the 'fp_regs'
@@ -255,11 +333,10 @@
 DEFINE_REG_STRUCT(gp_regs, CTX_GPREG_ALL);

 /*
- * AArch64 EL1 system register context structure for preserving the
- * architectural state during switches from one security state to
- * another in EL1.
+ * AArch64 EL1/EL2 system register context structure for preserving the
+ * architectural state during world switches.
  */
-DEFINE_REG_STRUCT(el1_sys_regs, CTX_SYSREG_ALL);
+DEFINE_REG_STRUCT(sys_regs, CTX_SYSREG_ALL);

 /*
  * AArch64 floating point register context structure for preserving
  * the floating point state during switches from one security state to
  * another.
@@ -304,7 +381,7 @@
 typedef struct cpu_context {
	gp_regs_t gpregs_ctx;
	el3_state_t el3state_ctx;
-	el1_sys_regs_t sysregs_ctx;
+	sys_regs_t sysregs_ctx;
 #if CTX_INCLUDE_FPREGS
	fp_regs_t fpregs_ctx;
 #endif
@@ -387,8 +464,14 @@
 /*******************************************************************************
  * Function prototypes
  ******************************************************************************/
-void el1_sysregs_context_save(el1_sys_regs_t *regs);
-void el1_sysregs_context_restore(el1_sys_regs_t *regs);
+void el1_sysregs_context_save(sys_regs_t *regs);
+void el1_sysregs_context_restore(sys_regs_t *regs);
+
+#if CTX_INCLUDE_EL2_REGS
+void el2_sysregs_context_save(sys_regs_t *regs);
+void el2_sysregs_context_restore(sys_regs_t *regs);
+#endif
+
 #if CTX_INCLUDE_FPREGS
 void fpregs_context_save(fp_regs_t *regs);
 void fpregs_context_restore(fp_regs_t *regs);
diff --git a/include/lib/el3_runtime/context_mgmt.h b/include/lib/el3_runtime/context_mgmt.h
index 17955e3..b36cd3d 100644
--- a/include/lib/el3_runtime/context_mgmt.h
+++ b/include/lib/el3_runtime/context_mgmt.h
@@ -36,6 +36,11 @@
 void cm_prepare_el3_exit(uint32_t security_state);

 #ifdef __aarch64__
+#if CTX_INCLUDE_EL2_REGS
+void cm_el2_sysregs_context_save(uint32_t security_state);
+void cm_el2_sysregs_context_restore(uint32_t security_state);
+#endif
+
 void cm_el1_sysregs_context_save(uint32_t security_state);
 void cm_el1_sysregs_context_restore(uint32_t security_state);
 void cm_set_elr_el3(uint32_t security_state, uintptr_t entrypoint);
diff --git a/lib/el3_runtime/aarch64/context.S b/lib/el3_runtime/aarch64/context.S
index 9bd25ba..bcc7eef 100644
--- a/lib/el3_runtime/aarch64/context.S
+++ b/lib/el3_runtime/aarch64/context.S
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2013-2019, ARM Limited and Contributors. All rights reserved.
+ * Copyright (c) 2013-2020, ARM Limited and Contributors. All rights reserved.
  *
  * SPDX-License-Identifier: BSD-3-Clause
  */
@@ -9,6 +9,11 @@
 #include
 #include

+#if CTX_INCLUDE_EL2_REGS
+	.global	el2_sysregs_context_save
+	.global	el2_sysregs_context_restore
+#endif
+
	.global	el1_sysregs_context_save
	.global	el1_sysregs_context_restore
 #if CTX_INCLUDE_FPREGS
@@ -19,6 +24,390 @@
	.global	restore_gp_pmcr_pauth_regs
	.global	el3_exit

+#if CTX_INCLUDE_EL2_REGS
+
+/* -----------------------------------------------------
+ * The following function strictly follows the AArch64
+ * PCS to use x9-x17 (temporary caller-saved registers)
+ * to save EL2 system register context. It assumes that
+ * 'x0' is pointing to a 'sys_regs' structure where
+ * the register context will be saved.
+ * -----------------------------------------------------
+ */
+func el2_sysregs_context_save
+
+	mrs	x9, actlr_el2
+	str	x9, [x0, #CTX_ACTLR_EL2]
+
+	mrs	x9, afsr0_el2
+	str	x9, [x0, #CTX_AFSR0_EL2]
+
+	mrs	x9, afsr1_el2
+	str	x9, [x0, #CTX_AFSR1_EL2]
+
+	mrs	x9, amair_el2
+	str	x9, [x0, #CTX_AMAIR_EL2]
+
+	mrs	x9, cnthctl_el2
+	str	x9, [x0, #CTX_CNTHCTL_EL2]
+
+	mrs	x9, cnthp_ctl_el2
+	str	x9, [x0, #CTX_CNTHP_CTL_EL2]
+
+	mrs	x9, cnthp_cval_el2
+	str	x9, [x0, #CTX_CNTHP_CVAL_EL2]
+
+	mrs	x9, cnthp_tval_el2
+	str	x9, [x0, #CTX_CNTHP_TVAL_EL2]
+
+	mrs	x9, CNTPOFF_EL2
+	str	x9, [x0, #CTX_CNTPOFF_EL2]
+
+	mrs	x9, cntvoff_el2
+	str	x9, [x0, #CTX_CNTVOFF_EL2]
+
+	mrs	x9, cptr_el2
+	str	x9, [x0, #CTX_CPTR_EL2]
+
+	mrs	x9, dbgvcr32_el2
+	str	x9, [x0, #CTX_DBGVCR32_EL2]
+
+	mrs	x9, elr_el2
+	str	x9, [x0, #CTX_ELR_EL2]
+
+	mrs	x9, esr_el2
+	str	x9, [x0, #CTX_ESR_EL2]
+
+	mrs	x9, far_el2
+	str	x9, [x0, #CTX_FAR_EL2]
+
+	mrs	x9, fpexc32_el2
+	str	x9, [x0, #CTX_FPEXC32_EL2]
+
+	mrs	x9, hacr_el2
+	str	x9, [x0, #CTX_HACR_EL2]
+
+	mrs	x9, HAFGRTR_EL2
+	str	x9, [x0, #CTX_HAFGRTR_EL2]
+
+	mrs	x9, hcr_el2
+	str	x9, [x0, #CTX_HCR_EL2]
+
+	mrs	x9, HDFGRTR_EL2
+	str	x9, [x0, #CTX_HDFGRTR_EL2]
+
+	mrs	x9, HDFGWTR_EL2
+	str	x9, [x0, #CTX_HDFGWTR_EL2]
+
+	mrs	x9, HFGITR_EL2
+	str	x9, [x0, #CTX_HFGITR_EL2]
+
+	mrs	x9, HFGRTR_EL2
+	str	x9, [x0, #CTX_HFGRTR_EL2]
+
+	mrs	x9, HFGWTR_EL2
+	str	x9, [x0, #CTX_HFGWTR_EL2]
+
+	mrs	x9, hpfar_el2
+	str	x9, [x0, #CTX_HPFAR_EL2]
+
+	mrs	x9, hstr_el2
+	str	x9, [x0, #CTX_HSTR_EL2]
+
+	mrs	x9, ICC_SRE_EL2
+	str	x9, [x0, #CTX_ICC_SRE_EL2]
+
+	mrs	x9, ICH_EISR_EL2
+	str	x9, [x0, #CTX_ICH_EISR_EL2]
+
+	mrs	x9, ICH_ELRSR_EL2
+	str	x9, [x0, #CTX_ICH_ELRSR_EL2]
+
+	mrs	x9, ICH_HCR_EL2
+	str	x9, [x0, #CTX_ICH_HCR_EL2]
+
+	mrs	x9, ICH_MISR_EL2
+	str	x9, [x0, #CTX_ICH_MISR_EL2]
+
+	mrs	x9, ICH_VMCR_EL2
+	str	x9, [x0, #CTX_ICH_VMCR_EL2]
+
+	mrs	x9, ICH_VTR_EL2
+	str	x9, [x0, #CTX_ICH_VTR_EL2]
+
+	mrs	x9, mair_el2
+	str	x9, [x0, #CTX_MAIR_EL2]
+
+	mrs	x9, mdcr_el2
+	str	x9, [x0, #CTX_MDCR_EL2]
+
+	mrs	x9, MPAM2_EL2
+	str	x9, [x0, #CTX_MPAM2_EL2]
+
+	mrs	x9, MPAMHCR_EL2
+	str	x9, [x0, #CTX_MPAMHCR_EL2]
+
+	mrs	x9, MPAMVPM0_EL2
+	str	x9, [x0, #CTX_MPAMVPM0_EL2]
+
+	mrs	x9, MPAMVPM1_EL2
+	str	x9, [x0, #CTX_MPAMVPM1_EL2]
+
+	mrs	x9, MPAMVPM2_EL2
+	str	x9, [x0, #CTX_MPAMVPM2_EL2]
+
+	mrs	x9, MPAMVPM3_EL2
+	str	x9, [x0, #CTX_MPAMVPM3_EL2]
+
+	mrs	x9, MPAMVPM4_EL2
+	str	x9, [x0, #CTX_MPAMVPM4_EL2]
+
+	mrs	x9, MPAMVPM5_EL2
+	str	x9, [x0, #CTX_MPAMVPM5_EL2]
+
+	mrs	x9, MPAMVPM6_EL2
+	str	x9, [x0, #CTX_MPAMVPM6_EL2]
+
+	mrs	x9, MPAMVPM7_EL2
+	str	x9, [x0, #CTX_MPAMVPM7_EL2]
+
+	mrs	x9, MPAMVPMV_EL2
+	str	x9, [x0, #CTX_MPAMVPMV_EL2]
+
+	mrs	x9, rmr_el2
+	str	x9, [x0, #CTX_RMR_EL2]
+
+	mrs	x9, sctlr_el2
+	str	x9, [x0, #CTX_SCTLR_EL2]
+
+	mrs	x9, spsr_el2
+	str	x9, [x0, #CTX_SPSR_EL2]
+
+	mrs	x9, sp_el2
+	str	x9, [x0, #CTX_SP_EL2]
+
+	mrs	x9, tcr_el2
+	str	x9, [x0, #CTX_TCR_EL2]
+
+	mrs	x9, tpidr_el2
+	str	x9, [x0, #CTX_TPIDR_EL2]
+
+	mrs	x9, ttbr0_el2
+	str	x9, [x0, #CTX_TTBR0_EL2]
+
+	mrs	x9, vbar_el2
+	str	x9, [x0, #CTX_VBAR_EL2]
+
+	mrs	x9, vmpidr_el2
+	str	x9, [x0, #CTX_VMPIDR_EL2]
+
+	mrs	x9, vpidr_el2
+	str	x9, [x0, #CTX_VPIDR_EL2]
+
+	mrs	x9, vtcr_el2
+	str	x9, [x0, #CTX_VTCR_EL2]
+
+	mrs	x9, vttbr_el2
+	str	x9, [x0, #CTX_VTTBR_EL2]
+
+	mrs	x9, ZCR_EL2
+	str	x9, [x0, #CTX_ZCR_EL2]
+
+	ret
+endfunc el2_sysregs_context_save
+
+/* -----------------------------------------------------
+ * The following function strictly follows the AArch64
+ * PCS to use x9-x17 (temporary caller-saved registers)
+ * to restore EL2 system register context. It assumes
+ * that 'x0' is pointing to a 'sys_regs' structure
+ * from where the register context will be restored
+ * -----------------------------------------------------
+ */
+func el2_sysregs_context_restore
+
+	ldr	x9, [x0, #CTX_ACTLR_EL2]
+	msr	actlr_el2, x9
+
+	ldr	x9, [x0, #CTX_AFSR0_EL2]
+	msr	afsr0_el2, x9
+
+	ldr	x9, [x0, #CTX_AFSR1_EL2]
+	msr	afsr1_el2, x9
+
+	ldr	x9, [x0, #CTX_AMAIR_EL2]
+	msr	amair_el2, x9
+
+	ldr	x9, [x0, #CTX_CNTHCTL_EL2]
+	msr	cnthctl_el2, x9
+
+	ldr	x9, [x0, #CTX_CNTHP_CTL_EL2]
+	msr	cnthp_ctl_el2, x9
+
+	ldr	x9, [x0, #CTX_CNTHP_CVAL_EL2]
+	msr	cnthp_cval_el2, x9
+
+	ldr	x9, [x0, #CTX_CNTHP_TVAL_EL2]
+	msr	cnthp_tval_el2, x9
+
+	ldr	x9, [x0, #CTX_CNTPOFF_EL2]
+	msr	CNTPOFF_EL2, x9
+
+	ldr	x9, [x0, #CTX_CNTVOFF_EL2]
+	msr	cntvoff_el2, x9
+
+	ldr	x9, [x0, #CTX_CPTR_EL2]
+	msr	cptr_el2, x9
+
+	ldr	x9, [x0, #CTX_DBGVCR32_EL2]
+	msr	dbgvcr32_el2, x9
+
+	ldr	x9, [x0, #CTX_ELR_EL2]
+	msr	elr_el2, x9
+
+	ldr	x9, [x0, #CTX_ESR_EL2]
+	msr	esr_el2, x9
+
+	ldr	x9, [x0, #CTX_FAR_EL2]
+	msr	far_el2, x9
+
+	ldr	x9, [x0, #CTX_FPEXC32_EL2]
+	msr	fpexc32_el2, x9
+
+	ldr	x9, [x0, #CTX_HACR_EL2]
+	msr	hacr_el2, x9
+
+	ldr	x9, [x0, #CTX_HAFGRTR_EL2]
+	msr	HAFGRTR_EL2, x9
+
+	ldr	x9, [x0, #CTX_HCR_EL2]
+	msr	hcr_el2, x9
+
+	ldr	x9, [x0, #CTX_HDFGRTR_EL2]
+	msr	HDFGRTR_EL2, x9
+
+	ldr	x9, [x0, #CTX_HDFGWTR_EL2]
+	msr	HDFGWTR_EL2, x9
+
+	ldr	x9, [x0, #CTX_HFGITR_EL2]
+	msr	HFGITR_EL2, x9
+
+	ldr	x9, [x0, #CTX_HFGRTR_EL2]
+	msr	HFGRTR_EL2, x9
+
+	ldr	x9, [x0, #CTX_HFGWTR_EL2]
+	msr	HFGWTR_EL2, x9
+
+	ldr	x9, [x0, #CTX_HPFAR_EL2]
+	msr	hpfar_el2, x9
+
+	ldr	x9, [x0, #CTX_HSTR_EL2]
+	msr	hstr_el2, x9
+
+	ldr	x9, [x0, #CTX_ICC_SRE_EL2]
+	msr	ICC_SRE_EL2, x9
+
+	ldr	x9, [x0, #CTX_ICH_EISR_EL2]
+	msr	ICH_EISR_EL2, x9
+
+	ldr	x9, [x0, #CTX_ICH_ELRSR_EL2]
+	msr	ICH_ELRSR_EL2, x9
+
+	ldr	x9, [x0, #CTX_ICH_HCR_EL2]
+	msr	ICH_HCR_EL2, x9
+
+	ldr	x9, [x0, #CTX_ICH_MISR_EL2]
+	msr	ICH_MISR_EL2, x9
+
+	ldr	x9, [x0, #CTX_ICH_VMCR_EL2]
+	msr	ICH_VMCR_EL2, x9
+
+	ldr	x9, [x0, #CTX_ICH_VTR_EL2]
+	msr	ICH_VTR_EL2, x9
+
+	ldr	x9, [x0, #CTX_MAIR_EL2]
+	msr	mair_el2, x9
+
+	ldr	x9, [x0, #CTX_MDCR_EL2]
+	msr	mdcr_el2, x9
+
+	ldr	x9, [x0, #CTX_MPAM2_EL2]
+	msr	MPAM2_EL2, x9
+
+	ldr	x9, [x0, #CTX_MPAMHCR_EL2]
+	msr	MPAMHCR_EL2, x9
+
+	ldr	x9, [x0, #CTX_MPAMVPM0_EL2]
+	msr	MPAMVPM0_EL2, x9
+
+	ldr	x9, [x0, #CTX_MPAMVPM1_EL2]
+	msr	MPAMVPM1_EL2, x9
+
+	ldr	x9, [x0, #CTX_MPAMVPM2_EL2]
+	msr	MPAMVPM2_EL2, x9
+
+	ldr	x9, [x0, #CTX_MPAMVPM3_EL2]
+	msr	MPAMVPM3_EL2, x9
+
+	ldr	x9, [x0, #CTX_MPAMVPM4_EL2]
+	msr	MPAMVPM4_EL2, x9
+
+	ldr	x9, [x0, #CTX_MPAMVPM5_EL2]
+	msr	MPAMVPM5_EL2, x9
+
+	ldr	x9, [x0, #CTX_MPAMVPM6_EL2]
+	msr	MPAMVPM6_EL2, x9
+
+	ldr	x9, [x0, #CTX_MPAMVPM7_EL2]
+	msr	MPAMVPM7_EL2, x9
+
+	ldr	x9, [x0, #CTX_MPAMVPMV_EL2]
+	msr	MPAMVPMV_EL2, x9
+
+	ldr	x9, [x0, #CTX_RMR_EL2]
+	msr	rmr_el2, x9
+
+	ldr	x9, [x0, #CTX_SCTLR_EL2]
+	msr	sctlr_el2, x9
+
+	ldr	x9, [x0, #CTX_SPSR_EL2]
+	msr	spsr_el2, x9
+
+	ldr	x9, [x0, #CTX_SP_EL2]
+	msr	sp_el2, x9
+
+	ldr	x9, [x0, #CTX_TCR_EL2]
+	msr	tcr_el2, x9
+
+	ldr	x9, [x0, #CTX_TPIDR_EL2]
+	msr	tpidr_el2, x9
+
+	ldr	x9, [x0, #CTX_TTBR0_EL2]
+	msr	ttbr0_el2, x9
+
+	ldr	x9, [x0, #CTX_VBAR_EL2]
+	msr	vbar_el2, x9
+
+	ldr	x9, [x0, #CTX_VMPIDR_EL2]
+	msr	vmpidr_el2, x9
+
+	ldr	x9, [x0, #CTX_VPIDR_EL2]
+	msr	vpidr_el2, x9
+
+	ldr	x9, [x0, #CTX_VTCR_EL2]
+	msr	vtcr_el2, x9
+
+	ldr	x9, [x0, #CTX_VTTBR_EL2]
+	msr	vttbr_el2, x9
+
+	ldr	x9, [x0, #CTX_ZCR_EL2]
+	msr	ZCR_EL2, x9
+
+	ret
+endfunc el2_sysregs_context_restore
+
+#endif /* CTX_INCLUDE_EL2_REGS */
+
 /* ------------------------------------------------------------------
  * The following function strictly follows the AArch64 PCS to use
  * x9-x17 (temporary caller-saved registers) to save EL1 system
diff --git a/lib/el3_runtime/aarch64/context_mgmt.c b/lib/el3_runtime/aarch64/context_mgmt.c
index 546e39e..f59bcfc 100644
--- a/lib/el3_runtime/aarch64/context_mgmt.c
+++ b/lib/el3_runtime/aarch64/context_mgmt.c
@@ -530,6 +530,52 @@
	cm_set_next_eret_context(security_state);
 }

+#if CTX_INCLUDE_EL2_REGS
+/*******************************************************************************
+ * Save EL2 sysreg context
+ ******************************************************************************/
+void cm_el2_sysregs_context_save(uint32_t security_state)
+{
+	u_register_t scr_el3 = read_scr();
+
+	/*
+	 * Always save the non-secure EL2 context, only save the
+	 * S-EL2 context if S-EL2 is enabled.
+	 */
+	if ((security_state == NON_SECURE) ||
+	    ((scr_el3 & SCR_EEL2_BIT) != 0U)) {
+		cpu_context_t *ctx;
+
+		ctx = cm_get_context(security_state);
+		assert(ctx != NULL);
+
+		el2_sysregs_context_save(get_sysregs_ctx(ctx));
+	}
+}
+
+/*******************************************************************************
+ * Restore EL2 sysreg context
+ ******************************************************************************/
+void cm_el2_sysregs_context_restore(uint32_t security_state)
+{
+	u_register_t scr_el3 = read_scr();
+
+	/*
+	 * Always restore the non-secure EL2 context, only restore the
+	 * S-EL2 context if S-EL2 is enabled.
+	 */
+	if ((security_state == NON_SECURE) ||
+	    ((scr_el3 & SCR_EEL2_BIT) != 0U)) {
+		cpu_context_t *ctx;
+
+		ctx = cm_get_context(security_state);
+		assert(ctx != NULL);
+
+		el2_sysregs_context_restore(get_sysregs_ctx(ctx));
+	}
+}
+#endif /* CTX_INCLUDE_EL2_REGS */
+
 /*******************************************************************************
  * The next four functions are used by runtime services to save and restore
  * EL1 context on the 'cpu_context' structure for the specified security
diff --git a/make_helpers/defaults.mk b/make_helpers/defaults.mk
index 60958a1..8e1f273 100644
--- a/make_helpers/defaults.mk
+++ b/make_helpers/defaults.mk
@@ -262,3 +262,8 @@

 # Enable Link Time Optimization
 ENABLE_LTO	:= 0
+
+# Build flag to include EL2 registers in cpu context save and restore during
+# S-EL2 firmware entry/exit. This flag is to be used with SPD=spmd option.
+# Default is 0.
+CTX_INCLUDE_EL2_REGS	:= 0
diff --git a/services/std_svc/spmd/spmd_main.c b/services/std_svc/spmd/spmd_main.c
index 677f639..1107190 100644
--- a/services/std_svc/spmd/spmd_main.c
+++ b/services/std_svc/spmd/spmd_main.c
@@ -49,6 +49,7 @@

	/* Restore the context assigned above */
	cm_el1_sysregs_context_restore(SECURE);
+	cm_el2_sysregs_context_restore(SECURE);
	cm_set_next_eret_context(SECURE);

	/* Invalidate TLBs at EL1. */
@@ -60,6 +61,7 @@

	/* Save secure state */
	cm_el1_sysregs_context_save(SECURE);
+	cm_el2_sysregs_context_save(SECURE);

	return rc;
 }
@@ -321,9 +323,11 @@

		/* Save incoming security state */
		cm_el1_sysregs_context_save(in_sstate);
+		cm_el2_sysregs_context_save(in_sstate);

		/* Restore outgoing security state */
		cm_el1_sysregs_context_restore(out_sstate);
+		cm_el2_sysregs_context_restore(out_sstate);
		cm_set_next_eret_context(out_sstate);

		SMC_RET8(cm_get_context(out_sstate), smc_fid, x1, x2, x3, x4,
@@ -366,9 +370,11 @@
		if (in_sstate == NON_SECURE) {
			/* Save incoming security state */
			cm_el1_sysregs_context_save(in_sstate);
+			cm_el2_sysregs_context_save(in_sstate);

			/* Restore outgoing security state */
			cm_el1_sysregs_context_restore(out_sstate);
+			cm_el2_sysregs_context_restore(out_sstate);
			cm_set_next_eret_context(out_sstate);

			SMC_RET8(cm_get_context(out_sstate), smc_fid,
@@ -432,9 +438,11 @@

		/* Save incoming security state */
		cm_el1_sysregs_context_save(in_sstate);
+		cm_el2_sysregs_context_save(in_sstate);

		/* Restore outgoing security state */
		cm_el1_sysregs_context_restore(out_sstate);
+		cm_el2_sysregs_context_restore(out_sstate);
		cm_set_next_eret_context(out_sstate);

		SMC_RET8(cm_get_context(out_sstate), smc_fid, x1, x2, x3, x4,
@@ -466,9 +474,11 @@

		/* Save incoming security state */
		cm_el1_sysregs_context_save(in_sstate);
+		cm_el2_sysregs_context_save(in_sstate);

		/* Restore outgoing security state */
		cm_el1_sysregs_context_restore(out_sstate);
+		cm_el2_sysregs_context_restore(out_sstate);
		cm_set_next_eret_context(out_sstate);

		SMC_RET8(cm_get_context(out_sstate), smc_fid, x1, x2, x3, x4,
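For reference, a minimal sketch (not part of the patch) of how a dispatcher built with CTX_INCLUDE_EL2_REGS=1 is expected to pair the new cm_el2_sysregs_* calls with the existing EL1 ones around a world switch, exactly as the spmd_main.c hunks above do. The helper name spmd_world_switch() is hypothetical and only illustrates the calling order; the guard mirrors the #if used in context_mgmt.h:

#if CTX_INCLUDE_EL2_REGS
/* Hypothetical helper: hand over from the 'from' world to the 'to' world. */
static void spmd_world_switch(uint32_t from, uint32_t to)
{
	/* Save the outgoing world's EL1 and EL2 system register context. */
	cm_el1_sysregs_context_save(from);
	cm_el2_sysregs_context_save(from);

	/* Restore the incoming world's context and program EL3 for ERET. */
	cm_el1_sysregs_context_restore(to);
	cm_el2_sysregs_context_restore(to);
	cm_set_next_eret_context(to);
}
#endif /* CTX_INCLUDE_EL2_REGS */

Note that cm_el2_sysregs_context_save/restore only touch the secure-world EL2 context when SCR_EL3.EEL2 is set, per the checks added in context_mgmt.c. A build using this patch would be invoked along the lines of make PLAT=<platform> SPD=spmd CTX_INCLUDE_EL2_REGS=1, since the Makefile change rejects SPD=spmd when the flag is left at its default of 0.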