Diffstat (limited to 'lib/el3_runtime/aarch64/context.S')
-rw-r--r--  lib/el3_runtime/aarch64/context.S  456
1 file changed, 205 insertions(+), 251 deletions(-)
diff --git a/lib/el3_runtime/aarch64/context.S b/lib/el3_runtime/aarch64/context.S
index 75e214d9c..40e7ddfa1 100644
--- a/lib/el3_runtime/aarch64/context.S
+++ b/lib/el3_runtime/aarch64/context.S
@@ -30,7 +30,7 @@
/* -----------------------------------------------------
* The following function strictly follows the AArch64
- * PCS to use x9-x17 (temporary caller-saved registers)
+ * PCS to use x9-x16 (temporary caller-saved registers)
* to save EL2 system register context. It assumes that
* 'x0' is pointing to a 'el2_sys_regs' structure where
* the register context will be saved.
@@ -43,7 +43,6 @@
* ICH_LR<n>_EL2
* -----------------------------------------------------
*/
-
func el2_sysregs_context_save
mrs x9, actlr_el2
mrs x10, afsr0_el2
@@ -54,185 +53,153 @@ func el2_sysregs_context_save
stp x11, x12, [x0, #CTX_AFSR1_EL2]
mrs x13, cnthctl_el2
- mrs x14, cnthp_ctl_el2
+ mrs x14, cntvoff_el2
stp x13, x14, [x0, #CTX_CNTHCTL_EL2]
- mrs x15, cnthp_cval_el2
- mrs x16, cnthp_tval_el2
- stp x15, x16, [x0, #CTX_CNTHP_CVAL_EL2]
-
- mrs x17, cntvoff_el2
- mrs x9, cptr_el2
- stp x17, x9, [x0, #CTX_CNTVOFF_EL2]
+ mrs x15, cptr_el2
+ str x15, [x0, #CTX_CPTR_EL2]
- mrs x11, elr_el2
#if CTX_INCLUDE_AARCH32_REGS
- mrs x10, dbgvcr32_el2
- stp x10, x11, [x0, #CTX_DBGVCR32_EL2]
-#else
- str x11, [x0, #CTX_ELR_EL2]
+ mrs x16, dbgvcr32_el2
+ str x16, [x0, #CTX_DBGVCR32_EL2]
#endif
- mrs x14, esr_el2
- mrs x15, far_el2
- stp x14, x15, [x0, #CTX_ESR_EL2]
+ mrs x9, elr_el2
+ mrs x10, esr_el2
+ stp x9, x10, [x0, #CTX_ELR_EL2]
- mrs x16, hacr_el2
- mrs x17, hcr_el2
- stp x16, x17, [x0, #CTX_HACR_EL2]
+ mrs x11, far_el2
+ mrs x12, hacr_el2
+ stp x11, x12, [x0, #CTX_FAR_EL2]
- mrs x9, hpfar_el2
- mrs x10, hstr_el2
- stp x9, x10, [x0, #CTX_HPFAR_EL2]
+ mrs x13, hcr_el2
+ mrs x14, hpfar_el2
+ stp x13, x14, [x0, #CTX_HCR_EL2]
- mrs x11, ICC_SRE_EL2
- mrs x12, ICH_HCR_EL2
- stp x11, x12, [x0, #CTX_ICC_SRE_EL2]
+ mrs x15, hstr_el2
+ mrs x16, ICC_SRE_EL2
+ stp x15, x16, [x0, #CTX_HSTR_EL2]
- mrs x13, ICH_VMCR_EL2
- mrs x14, mair_el2
- stp x13, x14, [x0, #CTX_ICH_VMCR_EL2]
+ mrs x9, ICH_HCR_EL2
+ mrs x10, ICH_VMCR_EL2
+ stp x9, x10, [x0, #CTX_ICH_HCR_EL2]
+
+ mrs x11, mair_el2
+ mrs x12, mdcr_el2
+ stp x11, x12, [x0, #CTX_MAIR_EL2]
- mrs x15, mdcr_el2
#if ENABLE_SPE_FOR_LOWER_ELS
- mrs x16, PMSCR_EL2
- stp x15, x16, [x0, #CTX_MDCR_EL2]
-#else
- str x15, [x0, #CTX_MDCR_EL2]
+ mrs x13, PMSCR_EL2
+ str x13, [x0, #CTX_PMSCR_EL2]
#endif
+ mrs x14, sctlr_el2
+ str x14, [x0, #CTX_SCTLR_EL2]
- mrs x17, sctlr_el2
- mrs x9, spsr_el2
- stp x17, x9, [x0, #CTX_SCTLR_EL2]
-
- mrs x10, sp_el2
- mrs x11, tcr_el2
- stp x10, x11, [x0, #CTX_SP_EL2]
+ mrs x15, spsr_el2
+ mrs x16, sp_el2
+ stp x15, x16, [x0, #CTX_SPSR_EL2]
- mrs x12, tpidr_el2
- mrs x13, ttbr0_el2
- stp x12, x13, [x0, #CTX_TPIDR_EL2]
+ mrs x9, tcr_el2
+ mrs x10, tpidr_el2
+ stp x9, x10, [x0, #CTX_TCR_EL2]
- mrs x14, vbar_el2
- mrs x15, vmpidr_el2
- stp x14, x15, [x0, #CTX_VBAR_EL2]
+ mrs x11, ttbr0_el2
+ mrs x12, vbar_el2
+ stp x11, x12, [x0, #CTX_TTBR0_EL2]
- mrs x16, vpidr_el2
- mrs x17, vtcr_el2
- stp x16, x17, [x0, #CTX_VPIDR_EL2]
+ mrs x13, vmpidr_el2
+ mrs x14, vpidr_el2
+ stp x13, x14, [x0, #CTX_VMPIDR_EL2]
- mrs x9, vttbr_el2
- str x9, [x0, #CTX_VTTBR_EL2]
+ mrs x15, vtcr_el2
+ mrs x16, vttbr_el2
+ stp x15, x16, [x0, #CTX_VTCR_EL2]
#if CTX_INCLUDE_MTE_REGS
- mrs x10, TFSR_EL2
- str x10, [x0, #CTX_TFSR_EL2]
+ mrs x9, TFSR_EL2
+ str x9, [x0, #CTX_TFSR_EL2]
#endif
#if ENABLE_MPAM_FOR_LOWER_ELS
- mrs x9, MPAM2_EL2
- mrs x10, MPAMHCR_EL2
- stp x9, x10, [x0, #CTX_MPAM2_EL2]
+ mrs x10, MPAM2_EL2
+ str x10, [x0, #CTX_MPAM2_EL2]
- mrs x11, MPAMVPM0_EL2
- mrs x12, MPAMVPM1_EL2
- stp x11, x12, [x0, #CTX_MPAMVPM0_EL2]
+ mrs x11, MPAMHCR_EL2
+ mrs x12, MPAMVPM0_EL2
+ stp x11, x12, [x0, #CTX_MPAMHCR_EL2]
- mrs x13, MPAMVPM2_EL2
- mrs x14, MPAMVPM3_EL2
- stp x13, x14, [x0, #CTX_MPAMVPM2_EL2]
+ mrs x13, MPAMVPM1_EL2
+ mrs x14, MPAMVPM2_EL2
+ stp x13, x14, [x0, #CTX_MPAMVPM1_EL2]
- mrs x15, MPAMVPM4_EL2
- mrs x16, MPAMVPM5_EL2
- stp x15, x16, [x0, #CTX_MPAMVPM4_EL2]
+ mrs x15, MPAMVPM3_EL2
+ mrs x16, MPAMVPM4_EL2
+ stp x15, x16, [x0, #CTX_MPAMVPM3_EL2]
- mrs x17, MPAMVPM6_EL2
- mrs x9, MPAMVPM7_EL2
- stp x17, x9, [x0, #CTX_MPAMVPM6_EL2]
+ mrs x9, MPAMVPM5_EL2
+ mrs x10, MPAMVPM6_EL2
+ stp x9, x10, [x0, #CTX_MPAMVPM5_EL2]
- mrs x10, MPAMVPMV_EL2
- str x10, [x0, #CTX_MPAMVPMV_EL2]
+ mrs x11, MPAMVPM7_EL2
+ mrs x12, MPAMVPMV_EL2
+ stp x11, x12, [x0, #CTX_MPAMVPM7_EL2]
#endif
-
#if ARM_ARCH_AT_LEAST(8, 6)
- mrs x11, HAFGRTR_EL2
- mrs x12, HDFGRTR_EL2
- stp x11, x12, [x0, #CTX_HAFGRTR_EL2]
+ mrs x13, HAFGRTR_EL2
+ mrs x14, HDFGRTR_EL2
+ stp x13, x14, [x0, #CTX_HAFGRTR_EL2]
- mrs x13, HDFGWTR_EL2
- mrs x14, HFGITR_EL2
- stp x13, x14, [x0, #CTX_HDFGWTR_EL2]
+ mrs x15, HDFGWTR_EL2
+ mrs x16, HFGITR_EL2
+ stp x15, x16, [x0, #CTX_HDFGWTR_EL2]
- mrs x15, HFGRTR_EL2
- mrs x16, HFGWTR_EL2
- stp x15, x16, [x0, #CTX_HFGRTR_EL2]
+ mrs x9, HFGRTR_EL2
+ mrs x10, HFGWTR_EL2
+ stp x9, x10, [x0, #CTX_HFGRTR_EL2]
- mrs x17, CNTPOFF_EL2
- str x17, [x0, #CTX_CNTPOFF_EL2]
+ mrs x11, CNTPOFF_EL2
+ str x11, [x0, #CTX_CNTPOFF_EL2]
#endif
#if ARM_ARCH_AT_LEAST(8, 4)
- mrs x9, cnthps_ctl_el2
- mrs x10, cnthps_cval_el2
- stp x9, x10, [x0, #CTX_CNTHPS_CTL_EL2]
-
- mrs x11, cnthps_tval_el2
- mrs x12, cnthvs_ctl_el2
- stp x11, x12, [x0, #CTX_CNTHPS_TVAL_EL2]
-
- mrs x13, cnthvs_cval_el2
- mrs x14, cnthvs_tval_el2
- stp x13, x14, [x0, #CTX_CNTHVS_CVAL_EL2]
-
- mrs x15, cnthv_ctl_el2
- mrs x16, cnthv_cval_el2
- stp x15, x16, [x0, #CTX_CNTHV_CTL_EL2]
-
- mrs x17, cnthv_tval_el2
- mrs x9, contextidr_el2
- stp x17, x9, [x0, #CTX_CNTHV_TVAL_EL2]
+ mrs x12, contextidr_el2
+ str x12, [x0, #CTX_CONTEXTIDR_EL2]
#if CTX_INCLUDE_AARCH32_REGS
- mrs x10, sder32_el2
- str x10, [x0, #CTX_SDER32_EL2]
+ mrs x13, sder32_el2
+ str x13, [x0, #CTX_SDER32_EL2]
#endif
-
- mrs x11, ttbr1_el2
- str x11, [x0, #CTX_TTBR1_EL2]
-
- mrs x12, vdisr_el2
- str x12, [x0, #CTX_VDISR_EL2]
+ mrs x14, ttbr1_el2
+ mrs x15, vdisr_el2
+ stp x14, x15, [x0, #CTX_TTBR1_EL2]
#if CTX_INCLUDE_NEVE_REGS
- mrs x13, vncr_el2
- str x13, [x0, #CTX_VNCR_EL2]
+ mrs x16, vncr_el2
+ str x16, [x0, #CTX_VNCR_EL2]
#endif
- mrs x14, vsesr_el2
- str x14, [x0, #CTX_VSESR_EL2]
-
- mrs x15, vstcr_el2
- str x15, [x0, #CTX_VSTCR_EL2]
+ mrs x9, vsesr_el2
+ mrs x10, vstcr_el2
+ stp x9, x10, [x0, #CTX_VSESR_EL2]
- mrs x16, vsttbr_el2
- str x16, [x0, #CTX_VSTTBR_EL2]
-
- mrs x17, TRFCR_EL2
- str x17, [x0, #CTX_TRFCR_EL2]
+ mrs x11, vsttbr_el2
+ mrs x12, TRFCR_EL2
+ stp x11, x12, [x0, #CTX_VSTTBR_EL2]
#endif
#if ARM_ARCH_AT_LEAST(8, 5)
- mrs x9, scxtnum_el2
- str x9, [x0, #CTX_SCXTNUM_EL2]
+ mrs x13, scxtnum_el2
+ str x13, [x0, #CTX_SCXTNUM_EL2]
#endif
ret
endfunc el2_sysregs_context_save
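
A minimal sketch of the save pattern the rewritten function settles on: two system registers are read with back-to-back mrs instructions and written out with a single stp, cycling through x9-x16 so that x17 is never clobbered. CTX_FOO_EL2 is a hypothetical offset standing in for any pair of adjacent 8-byte slots in 'el2_sys_regs':

	mrs	x9, tpidr_el2			/* first register of the pair */
	mrs	x10, ttbr0_el2			/* second register of the pair */
	stp	x9, x10, [x0, #CTX_FOO_EL2]	/* one 16-byte store fills both
						 * adjacent context slots */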
+
/* -----------------------------------------------------
* The following function strictly follows the AArch64
- * PCS to use x9-x17 (temporary caller-saved registers)
+ * PCS to use x9-x16 (temporary caller-saved registers)
* to restore EL2 system register context. It assumes
* that 'x0' is pointing to a 'el2_sys_regs' structure
* from where the register context will be restored
@@ -246,7 +213,6 @@ endfunc el2_sysregs_context_save
* -----------------------------------------------------
*/
func el2_sysregs_context_restore
-
ldp x9, x10, [x0, #CTX_ACTLR_EL2]
msr actlr_el2, x9
msr afsr0_el2, x10
@@ -257,74 +223,66 @@ func el2_sysregs_context_restore
ldp x13, x14, [x0, #CTX_CNTHCTL_EL2]
msr cnthctl_el2, x13
- msr cnthp_ctl_el2, x14
+ msr cntvoff_el2, x14
- ldp x15, x16, [x0, #CTX_CNTHP_CVAL_EL2]
- msr cnthp_cval_el2, x15
- msr cnthp_tval_el2, x16
-
- ldp x17, x9, [x0, #CTX_CNTVOFF_EL2]
- msr cntvoff_el2, x17
- msr cptr_el2, x9
+ ldr x15, [x0, #CTX_CPTR_EL2]
+ msr cptr_el2, x15
#if CTX_INCLUDE_AARCH32_REGS
- ldp x10, x11, [x0, #CTX_DBGVCR32_EL2]
- msr dbgvcr32_el2, x10
-#else
- ldr x11, [x0, #CTX_ELR_EL2]
+ ldr x16, [x0, #CTX_DBGVCR32_EL2]
+ msr dbgvcr32_el2, x16
#endif
- msr elr_el2, x11
- ldp x14, x15, [x0, #CTX_ESR_EL2]
- msr esr_el2, x14
- msr far_el2, x15
+ ldp x9, x10, [x0, #CTX_ELR_EL2]
+ msr elr_el2, x9
+ msr esr_el2, x10
+
+ ldp x11, x12, [x0, #CTX_FAR_EL2]
+ msr far_el2, x11
+ msr hacr_el2, x12
- ldp x16, x17, [x0, #CTX_HACR_EL2]
- msr hacr_el2, x16
- msr hcr_el2, x17
+ ldp x13, x14, [x0, #CTX_HCR_EL2]
+ msr hcr_el2, x13
+ msr hpfar_el2, x14
- ldp x9, x10, [x0, #CTX_HPFAR_EL2]
- msr hpfar_el2, x9
- msr hstr_el2, x10
+ ldp x15, x16, [x0, #CTX_HSTR_EL2]
+ msr hstr_el2, x15
+ msr ICC_SRE_EL2, x16
- ldp x11, x12, [x0, #CTX_ICC_SRE_EL2]
- msr ICC_SRE_EL2, x11
- msr ICH_HCR_EL2, x12
+ ldp x9, x10, [x0, #CTX_ICH_HCR_EL2]
+ msr ICH_HCR_EL2, x9
+ msr ICH_VMCR_EL2, x10
- ldp x13, x14, [x0, #CTX_ICH_VMCR_EL2]
- msr ICH_VMCR_EL2, x13
- msr mair_el2, x14
+ ldp x11, x12, [x0, #CTX_MAIR_EL2]
+ msr mair_el2, x11
+ msr mdcr_el2, x12
#if ENABLE_SPE_FOR_LOWER_ELS
- ldp x15, x16, [x0, #CTX_MDCR_EL2]
- msr PMSCR_EL2, x16
-#else
- ldr x15, [x0, #CTX_MDCR_EL2]
+ ldr x13, [x0, #CTX_PMSCR_EL2]
+ msr PMSCR_EL2, x13
#endif
- msr mdcr_el2, x15
-
- ldp x17, x9, [x0, #CTX_SCTLR_EL2]
- msr sctlr_el2, x17
- msr spsr_el2, x9
+ ldr x14, [x0, #CTX_SCTLR_EL2]
+ msr sctlr_el2, x14
- ldp x10, x11, [x0, #CTX_SP_EL2]
- msr sp_el2, x10
- msr tcr_el2, x11
+ ldp x15, x16, [x0, #CTX_SPSR_EL2]
+ msr spsr_el2, x15
+ msr sp_el2, x16
- ldp x12, x13, [x0, #CTX_TPIDR_EL2]
- msr tpidr_el2, x12
- msr ttbr0_el2, x13
+ ldp x9, x10, [x0, #CTX_TCR_EL2]
+ msr tcr_el2, x9
+ msr tpidr_el2, x10
- ldp x13, x14, [x0, #CTX_VBAR_EL2]
- msr vbar_el2, x13
- msr vmpidr_el2, x14
+ ldp x11, x12, [x0, #CTX_TTBR0_EL2]
+ msr ttbr0_el2, x11
+ msr vbar_el2, x12
- ldp x15, x16, [x0, #CTX_VPIDR_EL2]
- msr vpidr_el2, x15
- msr vtcr_el2, x16
+ ldp x13, x14, [x0, #CTX_VMPIDR_EL2]
+ msr vmpidr_el2, x13
+ msr vpidr_el2, x14
- ldr x17, [x0, #CTX_VTTBR_EL2]
- msr vttbr_el2, x17
+ ldp x15, x16, [x0, #CTX_VTCR_EL2]
+ msr vtcr_el2, x15
+ msr vttbr_el2, x16
#if CTX_INCLUDE_MTE_REGS
ldr x9, [x0, #CTX_TFSR_EL2]
@@ -332,100 +290,76 @@ func el2_sysregs_context_restore
#endif
#if ENABLE_MPAM_FOR_LOWER_ELS
- ldp x10, x11, [x0, #CTX_MPAM2_EL2]
+ ldr x10, [x0, #CTX_MPAM2_EL2]
msr MPAM2_EL2, x10
- msr MPAMHCR_EL2, x11
- ldp x12, x13, [x0, #CTX_MPAMVPM0_EL2]
+ ldp x11, x12, [x0, #CTX_MPAMHCR_EL2]
+ msr MPAMHCR_EL2, x11
msr MPAMVPM0_EL2, x12
- msr MPAMVPM1_EL2, x13
- ldp x14, x15, [x0, #CTX_MPAMVPM2_EL2]
+ ldp x13, x14, [x0, #CTX_MPAMVPM1_EL2]
+ msr MPAMVPM1_EL2, x13
msr MPAMVPM2_EL2, x14
- msr MPAMVPM3_EL2, x15
- ldp x16, x17, [x0, #CTX_MPAMVPM4_EL2]
+ ldp x15, x16, [x0, #CTX_MPAMVPM3_EL2]
+ msr MPAMVPM3_EL2, x15
msr MPAMVPM4_EL2, x16
- msr MPAMVPM5_EL2, x17
- ldp x9, x10, [x0, #CTX_MPAMVPM6_EL2]
- msr MPAMVPM6_EL2, x9
- msr MPAMVPM7_EL2, x10
+ ldp x9, x10, [x0, #CTX_MPAMVPM5_EL2]
+ msr MPAMVPM5_EL2, x9
+ msr MPAMVPM6_EL2, x10
- ldr x11, [x0, #CTX_MPAMVPMV_EL2]
- msr MPAMVPMV_EL2, x11
+ ldp x11, x12, [x0, #CTX_MPAMVPM7_EL2]
+ msr MPAMVPM7_EL2, x11
+ msr MPAMVPMV_EL2, x12
#endif
#if ARM_ARCH_AT_LEAST(8, 6)
- ldp x12, x13, [x0, #CTX_HAFGRTR_EL2]
- msr HAFGRTR_EL2, x12
- msr HDFGRTR_EL2, x13
+ ldp x13, x14, [x0, #CTX_HAFGRTR_EL2]
+ msr HAFGRTR_EL2, x13
+ msr HDFGRTR_EL2, x14
- ldp x14, x15, [x0, #CTX_HDFGWTR_EL2]
- msr HDFGWTR_EL2, x14
- msr HFGITR_EL2, x15
+ ldp x15, x16, [x0, #CTX_HDFGWTR_EL2]
+ msr HDFGWTR_EL2, x15
+ msr HFGITR_EL2, x16
- ldp x16, x17, [x0, #CTX_HFGRTR_EL2]
- msr HFGRTR_EL2, x16
- msr HFGWTR_EL2, x17
+ ldp x9, x10, [x0, #CTX_HFGRTR_EL2]
+ msr HFGRTR_EL2, x9
+ msr HFGWTR_EL2, x10
- ldr x9, [x0, #CTX_CNTPOFF_EL2]
- msr CNTPOFF_EL2, x9
+ ldr x11, [x0, #CTX_CNTPOFF_EL2]
+ msr CNTPOFF_EL2, x11
#endif
#if ARM_ARCH_AT_LEAST(8, 4)
- ldp x10, x11, [x0, #CTX_CNTHPS_CTL_EL2]
- msr cnthps_ctl_el2, x10
- msr cnthps_cval_el2, x11
-
- ldp x12, x13, [x0, #CTX_CNTHPS_TVAL_EL2]
- msr cnthps_tval_el2, x12
- msr cnthvs_ctl_el2, x13
-
- ldp x14, x15, [x0, #CTX_CNTHVS_CVAL_EL2]
- msr cnthvs_cval_el2, x14
- msr cnthvs_tval_el2, x15
-
- ldp x16, x17, [x0, #CTX_CNTHV_CTL_EL2]
- msr cnthv_ctl_el2, x16
- msr cnthv_cval_el2, x17
-
- ldp x9, x10, [x0, #CTX_CNTHV_TVAL_EL2]
- msr cnthv_tval_el2, x9
- msr contextidr_el2, x10
+ ldr x12, [x0, #CTX_CONTEXTIDR_EL2]
+ msr contextidr_el2, x12
#if CTX_INCLUDE_AARCH32_REGS
- ldr x11, [x0, #CTX_SDER32_EL2]
- msr sder32_el2, x11
+ ldr x13, [x0, #CTX_SDER32_EL2]
+ msr sder32_el2, x13
#endif
-
- ldr x12, [x0, #CTX_TTBR1_EL2]
- msr ttbr1_el2, x12
-
- ldr x13, [x0, #CTX_VDISR_EL2]
- msr vdisr_el2, x13
+ ldp x14, x15, [x0, #CTX_TTBR1_EL2]
+ msr ttbr1_el2, x14
+ msr vdisr_el2, x15
#if CTX_INCLUDE_NEVE_REGS
- ldr x14, [x0, #CTX_VNCR_EL2]
- msr vncr_el2, x14
+ ldr x16, [x0, #CTX_VNCR_EL2]
+ msr vncr_el2, x16
#endif
- ldr x15, [x0, #CTX_VSESR_EL2]
- msr vsesr_el2, x15
-
- ldr x16, [x0, #CTX_VSTCR_EL2]
- msr vstcr_el2, x16
-
- ldr x17, [x0, #CTX_VSTTBR_EL2]
- msr vsttbr_el2, x17
+ ldp x9, x10, [x0, #CTX_VSESR_EL2]
+ msr vsesr_el2, x9
+ msr vstcr_el2, x10
- ldr x9, [x0, #CTX_TRFCR_EL2]
- msr TRFCR_EL2, x9
+ ldp x11, x12, [x0, #CTX_VSTTBR_EL2]
+ msr vsttbr_el2, x11
+ msr TRFCR_EL2, x12
#endif
#if ARM_ARCH_AT_LEAST(8, 5)
- ldr x10, [x0, #CTX_SCXTNUM_EL2]
- msr scxtnum_el2, x10
+ ldr x13, [x0, #CTX_SCXTNUM_EL2]
+ msr scxtnum_el2, x13
#endif
ret
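
The restore side mirrors that packing: a single ldp fetches two adjacent context slots, followed by one msr per register. As above, CTX_FOO_EL2 is a hypothetical offset for an adjacent pair of slots:

	ldp	x9, x10, [x0, #CTX_FOO_EL2]	/* one 16-byte load fetches both */
	msr	tpidr_el2, x9
	msr	ttbr0_el2, x10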
@@ -763,13 +697,14 @@ func save_gp_pmcr_pauth_regs
str x18, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_SP_EL0]
/* ----------------------------------------------------------
- * Check if earlier initialization MDCR_EL3.SCCD to 1 failed,
- * meaning that ARMv8-PMU is not implemented and PMCR_EL0
- * should be saved in non-secure context.
+ * Check if the earlier initialization of MDCR_EL3.SCCD/MCCD to
+ * 1 failed, meaning that FEAT_PMUv3p5/7 is not implemented and
+ * PMCR_EL0 should be saved in the non-secure context.
* ----------------------------------------------------------
*/
+ mov_imm x10, (MDCR_SCCD_BIT | MDCR_MCCD_BIT)
mrs x9, mdcr_el3
- tst x9, #MDCR_SCCD_BIT
+ tst x9, x10
bne 1f
/* Secure Cycle Counter is not disabled */
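
The switch from a tst immediate to mov_imm plus a register tst is forced by the A64 encoding rules: a logical immediate must be a repeating, rotated run of set bits, so the single MDCR_SCCD_BIT (bit 23) encodes directly, but its combination with MDCR_MCCD_BIT (bit 34) cannot be encoded in one instruction. A sketch of both cases:

	tst	x9, #MDCR_SCCD_BIT		/* single bit: valid logical immediate */
	mov_imm	x10, (MDCR_SCCD_BIT | MDCR_MCCD_BIT)
	tst	x9, x10				/* bits 23 and 34 together are not
						 * encodable, so test via a register */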
@@ -858,13 +793,14 @@ func restore_gp_pmcr_pauth_regs
/* ----------------------------------------------------------
* Back to Non-secure state.
- * Check if earlier initialization MDCR_EL3.SCCD to 1 failed,
- * meaning that ARMv8-PMU is not implemented and PMCR_EL0
- * should be restored from non-secure context.
+ * Check if the earlier initialization of MDCR_EL3.SCCD/MCCD to
+ * 1 failed, meaning that FEAT_PMUv3p5/7 is not implemented and
+ * PMCR_EL0 should be restored from the non-secure context.
* ----------------------------------------------------------
*/
+ mov_imm x1, (MDCR_SCCD_BIT | MDCR_MCCD_BIT)
mrs x0, mdcr_el3
- tst x0, #MDCR_SCCD_BIT
+ tst x0, x1
bne 2f
ldr x0, [sp, #CTX_EL3STATE_OFFSET + CTX_PMCR_EL0]
msr pmcr_el0, x0
@@ -965,6 +901,24 @@ func el3_exit
msr spsr_el3, x16
msr elr_el3, x17
+#if IMAGE_BL31
+ /* ----------------------------------------------------------
+ * Restore CPTR_EL3.
+ * ZCR is only restored if SVE is supported and enabled.
+ * Synchronization is required before zcr_el3 is addressed.
+ * ----------------------------------------------------------
+ */
+ ldp x19, x20, [sp, #CTX_EL3STATE_OFFSET + CTX_CPTR_EL3]
+ msr cptr_el3, x19
+
+ ands x19, x19, #CPTR_EZ_BIT
+ beq sve_not_enabled
+
+ isb
+ msr S3_6_C1_C2_0, x20 /* zcr_el3 */
+sve_not_enabled:
+#endif
+
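
The isb between the two writes is the synchronization the comment refers to: the msr to cptr_el3 that sets CPTR_EL3.EZ only stops trapping SVE register accesses after a context synchronization event, so without it the following write could still trap to EL3. S3_6_C1_C2_0 is the architectural encoding of zcr_el3, presumably spelled out so the file assembles with toolchains that predate SVE support. Reduced to its essentials:

	msr	cptr_el3, x19		/* may set CPTR_EL3.EZ */
	isb				/* make the new trap controls take effect */
	msr	S3_6_C1_C2_0, x20	/* zcr_el3, written via its encoding */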
#if IMAGE_BL31 && DYNAMIC_WORKAROUND_CVE_2018_3639
/* ----------------------------------------------------------
* Restore mitigation state as it was on entry to EL3