diff --git a/bl32/sp_min/aarch32/entrypoint.S b/bl32/sp_min/aarch32/entrypoint.S
index b2b7953..e7528d3 100644
--- a/bl32/sp_min/aarch32/entrypoint.S
+++ b/bl32/sp_min/aarch32/entrypoint.S
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2016-2017, ARM Limited and Contributors. All rights reserved.
+ * Copyright (c) 2016-2018, ARM Limited and Contributors. All rights reserved.
  *
  * SPDX-License-Identifier: BSD-3-Clause
  */
@@ -17,6 +17,8 @@
 	.globl	sp_min_vector_table
 	.globl	sp_min_entrypoint
 	.globl	sp_min_warm_entrypoint
+	.globl	sp_min_handle_smc
+	.globl	sp_min_handle_fiq
 
 	.macro route_fiq_to_sp_min reg
 	/* -----------------------------------------------------
@@ -43,12 +45,12 @@
 vector_base sp_min_vector_table
 	b	sp_min_entrypoint
 	b	plat_panic_handler	/* Undef */
-	b	handle_smc		/* Syscall */
+	b	sp_min_handle_smc	/* Syscall */
 	b	plat_panic_handler	/* Prefetch abort */
 	b	plat_panic_handler	/* Data abort */
 	b	plat_panic_handler	/* Reserved */
 	b	plat_panic_handler	/* IRQ */
-	b	handle_fiq		/* FIQ */
+	b	sp_min_handle_fiq	/* FIQ */
 
 
 /*
@@ -151,7 +153,7 @@
 /*
  * SMC handling function for SP_MIN.
  */
-func handle_smc
+func sp_min_handle_smc
 	/* On SMC entry, `sp` points to `smc_ctx_t`. Save `lr`. */
 	str	lr, [sp, #SMC_CTX_LR_MON]
 
@@ -199,12 +201,12 @@
 
 	/* `r0` points to `smc_ctx_t` */
 	b	sp_min_exit
-endfunc handle_smc
+endfunc sp_min_handle_smc
 
 /*
  * Secure Interrupts handling function for SP_MIN.
  */
-func handle_fiq
+func sp_min_handle_fiq
 #if !SP_MIN_WITH_SECURE_FIQ
 	b	plat_panic_handler
 #else
@@ -242,7 +244,7 @@
 
 	b	sp_min_exit
 #endif
-endfunc handle_fiq
+endfunc sp_min_handle_fiq
 
 /*
  * The Warm boot entrypoint for SP_MIN.
diff --git a/bl32/sp_min/sp_min.mk b/bl32/sp_min/sp_min.mk
index 56489a3..67a1981 100644
--- a/bl32/sp_min/sp_min.mk
+++ b/bl32/sp_min/sp_min.mk
@@ -1,5 +1,5 @@
 #
-# Copyright (c) 2016-2017, ARM Limited and Contributors. All rights reserved.
+# Copyright (c) 2016-2018, ARM Limited and Contributors. All rights reserved.
 #
 # SPDX-License-Identifier: BSD-3-Clause
 #
@@ -26,6 +26,11 @@
 BL32_SOURCES	+=	lib/extensions/amu/aarch32/amu.c
 endif
 
+ifeq (${WORKAROUND_CVE_2017_5715},1)
+BL32_SOURCES	+=	bl32/sp_min/workaround_cve_2017_5715_bpiall.S	\
+			bl32/sp_min/workaround_cve_2017_5715_icache_inv.S
+endif
+
 BL32_LINKERFILE	:=	bl32/sp_min/sp_min.ld.S
 
 # Include the platform-specific SP_MIN Makefile
diff --git a/bl32/sp_min/workaround_cve_2017_5715_bpiall.S b/bl32/sp_min/workaround_cve_2017_5715_bpiall.S
new file mode 100644
index 0000000..5387cef
--- /dev/null
+++ b/bl32/sp_min/workaround_cve_2017_5715_bpiall.S
@@ -0,0 +1,74 @@
+/*
+ * Copyright (c) 2018, ARM Limited and Contributors. All rights reserved.
+ *
+ * SPDX-License-Identifier: BSD-3-Clause
+ */
+
+#include <asm_macros.S>
+
+	.globl	workaround_bpiall_runtime_exceptions
+
+vector_base workaround_bpiall_runtime_exceptions
+	/* We encode the exception entry in the bottom 3 bits of SP */
+	add	sp, sp, #1	/* Reset: 0b111 */
+	add	sp, sp, #1	/* Undef: 0b110 */
+	add	sp, sp, #1	/* Syscall: 0b101 */
+	add	sp, sp, #1	/* Prefetch abort: 0b100 */
+	add	sp, sp, #1	/* Data abort: 0b011 */
+	add	sp, sp, #1	/* Reserved: 0b010 */
+	add	sp, sp, #1	/* IRQ: 0b001 */
+	nop			/* FIQ: 0b000 */
+
+	/*
+	 * Invalidate the branch predictor, `r0` is a dummy register
+	 * and is unused.
+	 */
+	stcopr	r0, BPIALL
+	isb
+
+	/*
+	 * As we cannot use any temporary registers and cannot
+	 * clobber SP, we can decode the exception entry using
+	 * an unrolled binary search.
+	 *
+	 * Note, if this code is re-used by other secure payloads,
+	 * the below exception entry vectors must be changed to
+	 * the vectors specific to that secure payload.
+	 */
+
+	tst	sp, #4
+	bne	1f
+
+	tst	sp, #2
+	bne	3f
+
+	/* Expected encoding: 0x1 and 0x0 */
+	tst	sp, #1
+	/* Restore original value of SP by clearing the bottom 3 bits */
+	bic	sp, sp, #0x7
+	bne	plat_panic_handler	/* IRQ */
+	b	sp_min_handle_fiq	/* FIQ */
+
+1:
+	tst	sp, #2
+	bne	2f
+
+	/* Expected encoding: 0x4 and 0x5 */
+	tst	sp, #1
+	bic	sp, sp, #0x7
+	bne	sp_min_handle_smc	/* Syscall */
+	b	plat_panic_handler	/* Prefetch abort */
+
+2:
+	/* Expected encoding: 0x7 and 0x6 */
+	tst	sp, #1
+	bic	sp, sp, #0x7
+	bne	sp_min_entrypoint	/* Reset */
+	b	plat_panic_handler	/* Undef */
+
+3:
+	/* Expected encoding: 0x2 and 0x3 */
+	tst	sp, #1
+	bic	sp, sp, #0x7
+	bne	plat_panic_handler	/* Data abort */
+	b	plat_panic_handler	/* Reserved */
diff --git a/bl32/sp_min/workaround_cve_2017_5715_icache_inv.S b/bl32/sp_min/workaround_cve_2017_5715_icache_inv.S
new file mode 100644
index 0000000..9102b02
--- /dev/null
+++ b/bl32/sp_min/workaround_cve_2017_5715_icache_inv.S
@@ -0,0 +1,75 @@
+/*
+ * Copyright (c) 2018, ARM Limited and Contributors. All rights reserved.
+ *
+ * SPDX-License-Identifier: BSD-3-Clause
+ */
+
+#include <asm_macros.S>
+
+	.globl	workaround_icache_inv_runtime_exceptions
+
+vector_base workaround_icache_inv_runtime_exceptions
+	/* We encode the exception entry in the bottom 3 bits of SP */
+	add	sp, sp, #1	/* Reset: 0b111 */
+	add	sp, sp, #1	/* Undef: 0b110 */
+	add	sp, sp, #1	/* Syscall: 0b101 */
+	add	sp, sp, #1	/* Prefetch abort: 0b100 */
+	add	sp, sp, #1	/* Data abort: 0b011 */
+	add	sp, sp, #1	/* Reserved: 0b010 */
+	add	sp, sp, #1	/* IRQ: 0b001 */
+	nop			/* FIQ: 0b000 */
+
+	/*
+	 * Invalidate the instruction cache, which we assume also
+	 * invalidates the branch predictor. This may depend on
+	 * other CPU specific changes (e.g. an ACTLR setting).
+	 */
+	stcopr	r0, ICIALLU
+	isb
+
+	/*
+	 * As we cannot use any temporary registers and cannot
+	 * clobber SP, we can decode the exception entry using
+	 * an unrolled binary search.
+	 *
+	 * Note, if this code is re-used by other secure payloads,
+	 * the below exception entry vectors must be changed to
+	 * the vectors specific to that secure payload.
+	 */
+
+	tst	sp, #4
+	bne	1f
+
+	tst	sp, #2
+	bne	3f
+
+	/* Expected encoding: 0x1 and 0x0 */
+	tst	sp, #1
+	/* Restore original value of SP by clearing the bottom 3 bits */
+	bic	sp, sp, #0x7
+	bne	plat_panic_handler	/* IRQ */
+	b	sp_min_handle_fiq	/* FIQ */
+
+1:
+	tst	sp, #2
+	bne	2f
+
+	/* Expected encoding: 0x4 and 0x5 */
+	tst	sp, #1
+	bic	sp, sp, #0x7
+	bne	sp_min_handle_smc	/* Syscall */
+	b	plat_panic_handler	/* Prefetch abort */
+
+2:
+	/* Expected encoding: 0x7 and 0x6 */
+	tst	sp, #1
+	bic	sp, sp, #0x7
+	bne	sp_min_entrypoint	/* Reset */
+	b	plat_panic_handler	/* Undef */
+
+3:
+	/* Expected encoding: 0x2 and 0x3 */
+	tst	sp, #1
+	bic	sp, sp, #0x7
+	bne	plat_panic_handler	/* Data abort */
+	b	plat_panic_handler	/* Reserved */
diff --git a/include/common/aarch32/el3_common_macros.S b/include/common/aarch32/el3_common_macros.S
index 59e99f8..74fb582 100644
--- a/include/common/aarch32/el3_common_macros.S
+++ b/include/common/aarch32/el3_common_macros.S
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2016-2017, ARM Limited and Contributors. All rights reserved.
+ * Copyright (c) 2016-2018, ARM Limited and Contributors. All rights reserved.
  *
  * SPDX-License-Identifier: BSD-3-Clause
  */
@@ -14,7 +14,7 @@
 	/*
 	 * Helper macro to initialise EL3 registers we care about.
 	 */
-	.macro el3_arch_init_common _exception_vectors
+	.macro el3_arch_init_common
 	/* ---------------------------------------------------------------------
 	 * SCTLR has already been initialised - read current value before
 	 * modifying.
@@ -34,15 +34,6 @@
 	isb
 
 	/* ---------------------------------------------------------------------
-	 * Set the exception vectors (VBAR/MVBAR).
-	 * ---------------------------------------------------------------------
-	 */
-	ldr	r0, =\_exception_vectors
-	stcopr	r0, VBAR
-	stcopr	r0, MVBAR
-	isb
-
-	/* ---------------------------------------------------------------------
 	 * Initialise SCR, setting all fields rather than relying on the hw.
 	 *
 	 * SCR.SIF: Enabled so that Secure state instruction fetches from
@@ -211,6 +202,15 @@
 	.endif /* _warm_boot_mailbox */
 
 	/* ---------------------------------------------------------------------
+	 * Set the exception vectors (VBAR/MVBAR).
+	 * ---------------------------------------------------------------------
+	 */
+	ldr	r0, =\_exception_vectors
+	stcopr	r0, VBAR
+	stcopr	r0, MVBAR
+	isb
+
+	/* ---------------------------------------------------------------------
 	 * It is a cold boot.
 	 * Perform any processor specific actions upon reset e.g. cache, TLB
 	 * invalidations etc.
@@ -218,7 +218,7 @@
 	 */
 	bl	reset_handler
 
-	el3_arch_init_common \_exception_vectors
+	el3_arch_init_common
 
 	.if \_secondary_cold_boot
 	/* -------------------------------------------------------------
diff --git a/include/lib/aarch32/arch.h b/include/lib/aarch32/arch.h
index 4d2a5fc..134d534 100644
--- a/include/lib/aarch32/arch.h
+++ b/include/lib/aarch32/arch.h
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2016-2017, ARM Limited and Contributors. All rights reserved.
+ * Copyright (c) 2016-2018, ARM Limited and Contributors. All rights reserved.
  *
  * SPDX-License-Identifier: BSD-3-Clause
  */
@@ -426,6 +426,8 @@
 #define TLBIMVAA	p15, 0, c8, c7, 3
 #define TLBIMVAAIS	p15, 0, c8, c3, 3
 #define BPIALLIS	p15, 0, c7, c1, 6
+#define BPIALL		p15, 0, c7, c5, 6
+#define ICIALLU		p15, 0, c7, c5, 0
 #define HSCTLR		p15, 4, c1, c0, 0
 #define HCR		p15, 4, c1, c1, 0
 #define HCPTR		p15, 4, c1, c1, 2
diff --git a/include/lib/aarch32/smcc_helpers.h b/include/lib/aarch32/smcc_helpers.h
index 53f1aa4..ed3b722 100644
--- a/include/lib/aarch32/smcc_helpers.h
+++ b/include/lib/aarch32/smcc_helpers.h
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2016-2017, ARM Limited and Contributors. All rights reserved.
+ * Copyright (c) 2016-2018, ARM Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */
@@ -22,7 +22,7 @@
 #define SMC_CTX_LR_MON		0x80
 #define SMC_CTX_SCR		0x84
 #define SMC_CTX_PMCR		0x88
-#define SMC_CTX_SIZE		0x8C
+#define SMC_CTX_SIZE		0x90
 
 #ifndef __ASSEMBLY__
 #include <cassert.h>
@@ -75,7 +75,13 @@
 	u_register_t lr_mon;
 	u_register_t scr;
 	u_register_t pmcr;
-} smc_ctx_t;
+	/*
+	 * The workaround for CVE-2017-5715 requires storing information in
+	 * the bottom 3 bits of the stack pointer. Add a padding field to
+	 * force the size of the struct to be a multiple of 8.
+	 */
+	u_register_t pad;
+} smc_ctx_t __aligned(8);
 
 /*
  * Compile time assertions related to the 'smc_context' structure to
@@ -99,6 +105,7 @@
 CASSERT(SMC_CTX_SPSR_MON == __builtin_offsetof(smc_ctx_t, spsr_mon), \
 	assert_smc_ctx_spsr_mon_offset_mismatch);
 
+CASSERT((sizeof(smc_ctx_t) & 0x7) == 0, assert_smc_ctx_not_aligned);
 CASSERT(SMC_CTX_SIZE == sizeof(smc_ctx_t), assert_smc_ctx_size_mismatch);
 
 /* Convenience macros to return from SMC handler */
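
Reviewer note: the unrolled binary search in the two new workaround vector
files is easiest to sanity-check against a plain C model. The sketch below is
illustrative only and not part of the patch; the enum values mirror the
encoding established by the `add sp, sp, #1` stubs (Reset: 0b111 down to
FIQ: 0b000), and the target names are hypothetical stand-ins for the real
branch targets (sp_min_handle_fiq, sp_min_handle_smc, sp_min_entrypoint,
plat_panic_handler).

    #include <assert.h>
    #include <stdint.h>

    /* Exception numbers as encoded by the vector stubs. */
    enum { FIQ = 0, IRQ = 1, RESERVED = 2, DATA_ABORT = 3,
           PREFETCH_ABORT = 4, SYSCALL = 5, UNDEF = 6, RESET = 7 };

    /* Hypothetical stand-ins for the real branch targets. */
    typedef enum { PANIC, HANDLE_FIQ, HANDLE_SMC, ENTRYPOINT } target_t;

    /* Mirror of the unrolled binary search on the bottom 3 bits of SP. */
    static target_t decode(uintptr_t sp)
    {
        unsigned entry = sp & 0x7;          /* tst sp, #4 / #2 / #1 */

        if ((entry & 4) == 0) {
            if ((entry & 2) == 0)
                return (entry & 1) ? PANIC  /* IRQ */ : HANDLE_FIQ;
            return PANIC;   /* Data abort (3) or Reserved (2) */
        }
        if ((entry & 2) == 0)
            return (entry & 1) ? HANDLE_SMC /* Syscall */
                               : PANIC;     /* Prefetch abort */
        return (entry & 1) ? ENTRYPOINT     /* Reset */
                           : PANIC;         /* Undef */
    }

    int main(void)
    {
        assert(decode(FIQ) == HANDLE_FIQ);
        assert(decode(SYSCALL) == HANDLE_SMC);
        assert(decode(RESET) == ENTRYPOINT);
        assert(decode(IRQ) == PANIC && decode(UNDEF) == PANIC);
        return 0;
    }

Each test in the real code is a tst/bne pair, so decoding costs at most three
compares per exception, and the invalidation (BPIALL or ICIALLU) happens
before any branch whose target could have been predicted.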
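
The smcc_helpers.h change is what makes the SP round trip safe: on monitor
entry SP points at an smc_ctx_t, so the bottom three bits of SP are only
guaranteed to be free if both the size and the alignment of the struct are
multiples of 8, which `pad` plus `__aligned(8)` provide. A minimal
stand-alone model of this, again not part of the patch; `regs[35]` is a
hypothetical stand-in for the 35 u_register_t members that put lr_mon, scr
and pmcr at offsets 0x80, 0x84 and 0x88:

    #include <stdint.h>

    typedef uint32_t u_register_t;  /* stand-in for AArch32 u_register_t */

    /* Reduced model of smc_ctx_t: 35 words up to pmcr, plus the new pad. */
    typedef struct smc_ctx {
        u_register_t regs[35];  /* r0..r12, banked regs, lr_mon, scr, pmcr */
        u_register_t pad;       /* forces sizeof to a multiple of 8 */
    } __attribute__((aligned(8))) smc_ctx_t;

    _Static_assert(sizeof(smc_ctx_t) == 0x90, "matches the new SMC_CTX_SIZE");
    _Static_assert((sizeof(smc_ctx_t) & 0x7) == 0, "mirrors the new CASSERT");

    int main(void)
    {
        smc_ctx_t ctx = { { 0 }, 0 };
        uintptr_t sp = (uintptr_t)&ctx;

        /* Emulate a Syscall entry (0b101) then bic sp, sp, #0x7. */
        uintptr_t encoded = sp + 5;
        return ((encoded & ~(uintptr_t)0x7) == sp) ? 0 : 1;
    }

With the pad, sizeof(smc_ctx_t) is 0x90 (144 bytes), so an array of contexts
keeps SP 8-byte aligned and the BIC recovers the original pointer exactly,
which is what the new CASSERT enforces at build time.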