diff --git a/include/arch/aarch64/arch.h b/include/arch/aarch64/arch.h
index 9d4ad3b..c7cf9f4 100644
--- a/include/arch/aarch64/arch.h
+++ b/include/arch/aarch64/arch.h
@@ -211,6 +211,11 @@
 #define PARANGE_0101	U(48)
 #define PARANGE_0110	U(52)
 
+#define ID_AA64MMFR0_EL1_FGT_SHIFT		U(56)
+#define ID_AA64MMFR0_EL1_FGT_MASK		ULL(0xf)
+#define ID_AA64MMFR0_EL1_FGT_SUPPORTED		ULL(0x1)
+#define ID_AA64MMFR0_EL1_FGT_NOT_SUPPORTED	ULL(0x0)
+
 #define ID_AA64MMFR0_EL1_TGRAN4_SHIFT		U(28)
 #define ID_AA64MMFR0_EL1_TGRAN4_MASK		ULL(0xf)
 #define ID_AA64MMFR0_EL1_TGRAN4_SUPPORTED	ULL(0x0)
@@ -324,6 +329,7 @@
 #define SCR_TWEDEL_SHIFT	U(30)
 #define SCR_TWEDEL_MASK		ULL(0xf)
 #define SCR_TWEDEn_BIT		(UL(1) << 29)
+#define SCR_FGTEN_BIT		(U(1) << 27)
 #define SCR_ATA_BIT		(U(1) << 26)
 #define SCR_FIEN_BIT		(U(1) << 21)
 #define SCR_EEL2_BIT		(U(1) << 18)
diff --git a/include/arch/aarch64/arch_features.h b/include/arch/aarch64/arch_features.h
index 321485a..9bcf305 100644
--- a/include/arch/aarch64/arch_features.h
+++ b/include/arch/aarch64/arch_features.h
@@ -64,6 +64,12 @@
 		ID_AA64MMFR1_EL1_TWED_MASK) == ID_AA64MMFR1_EL1_TWED_SUPPORTED);
 }
 
+static inline bool is_armv8_6_fgt_present(void)
+{
+	return ((read_id_aa64mmfr0_el1() >> ID_AA64MMFR0_EL1_FGT_SHIFT) &
+		ID_AA64MMFR0_EL1_FGT_MASK) == ID_AA64MMFR0_EL1_FGT_SUPPORTED;
+}
+
 /*
  * Return MPAM version:
  *
diff --git a/lib/el3_runtime/aarch64/context_mgmt.c b/lib/el3_runtime/aarch64/context_mgmt.c
index 64a2d7b..1c5ba36 100644
--- a/lib/el3_runtime/aarch64/context_mgmt.c
+++ b/lib/el3_runtime/aarch64/context_mgmt.c
@@ -173,11 +173,18 @@
 	 * SCR_EL3.HCE: Enable HVC instructions if next execution state is
 	 * AArch64 and next EL is EL2, or if next execution state is AArch32 and
 	 * next mode is Hyp.
+	 * SCR_EL3.FGTEn: Enable Fine Grained Virtualization Traps under the
+	 * same conditions as HVC instructions and when the processor supports
+	 * ARMv8.6-FGT.
 	 */
 	if (((GET_RW(ep->spsr) == MODE_RW_64) && (GET_EL(ep->spsr) == MODE_EL2))
 	    || ((GET_RW(ep->spsr) != MODE_RW_64)
 		&& (GET_M32(ep->spsr) == MODE32_hyp))) {
 		scr_el3 |= SCR_HCE_BIT;
+
+		if (is_armv8_6_fgt_present()) {
+			scr_el3 |= SCR_FGTEN_BIT;
+		}
 	}
 
 	/* Enable S-EL2 if the next EL is EL2 and security state is secure */
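
For reference, below is a minimal, host-runnable sketch of the ID-register probe that is_armv8_6_fgt_present() performs: the FGT field occupies bits [59:56] of ID_AA64MMFR0_EL1, and the check shifts, masks, and compares that 4-bit field against the "supported" value. The mock_id_aa64mmfr0_el1() helper and its return value are hypothetical stand-ins (the real register is only readable on an AArch64 CPU at EL1 or higher); this is an illustration of the pattern, not TF-A code.

/* fgt_probe_demo.c: stand-alone sketch of the FEAT_FGT detection pattern. */
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

#define ID_AA64MMFR0_EL1_FGT_SHIFT	56
#define ID_AA64MMFR0_EL1_FGT_MASK	0xfULL
#define ID_AA64MMFR0_EL1_FGT_SUPPORTED	0x1ULL

/*
 * Hypothetical stand-in for read_id_aa64mmfr0_el1(); returns a value with
 * the FGT field set to "supported" purely for demonstration.
 */
static uint64_t mock_id_aa64mmfr0_el1(void)
{
	return ID_AA64MMFR0_EL1_FGT_SUPPORTED << ID_AA64MMFR0_EL1_FGT_SHIFT;
}

static bool fgt_present(uint64_t id_aa64mmfr0)
{
	/* Extract bits [59:56] and compare against the "supported" value. */
	return ((id_aa64mmfr0 >> ID_AA64MMFR0_EL1_FGT_SHIFT) &
		ID_AA64MMFR0_EL1_FGT_MASK) == ID_AA64MMFR0_EL1_FGT_SUPPORTED;
}

int main(void)
{
	printf("FEAT_FGT present: %s\n",
	       fgt_present(mock_id_aa64mmfr0_el1()) ? "yes" : "no");
	return 0;
}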