
lib/el3_runtime/aarch64/context.S vs lib/el3_runtime/aarch64/context.S

/*
 * Copyright (c) 2013-2021, ARM Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <arch.h>
#include <asm_macros.S>
#include <assert_macros.S>
#include <context.h>
#include <el_max_common_macros.S>

#if CTX_INCLUDE_EL2_REGS
        .global el2_sysregs_context_save
        .global el2_sysregs_context_restore
#endif

        .global el1_sysregs_context_save
        .global el1_sysregs_context_restore
#if CTX_INCLUDE_FPREGS
        .global fpregs_context_save
        .global fpregs_context_restore
#endif
        .global save_gp_pmcr_pauth_regs
        .global restore_gp_pmcr_pauth_regs
        .global save_and_update_ptw_el1_sys_regs
        .global el3_exit

#if CTX_INCLUDE_EL2_REGS

/* -----------------------------------------------------
 * The following function strictly follows the AArch64
 * PCS to use x9-x16 (temporary caller-saved registers)
 * to save EL2 system register context. It assumes that
 * 'x0' is pointing to an 'el2_sys_regs' structure where
 * the register context will be saved.
 *
 * The following registers are not saved:
 * AMEVCNTVOFF0<n>_EL2
 * AMEVCNTVOFF1<n>_EL2
 * ICH_AP0R<n>_EL2
 * ICH_AP1R<n>_EL2
 * ICH_LR<n>_EL2
 * -----------------------------------------------------
 */
func el2_sysregs_context_save
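        /*
         * Note: each stp below stores a register pair into two
         * consecutive 8-byte slots, so the CTX_*_EL2 offsets used
         * here rely on context.h laying the saved registers out
         * pairwise (e.g. the afsr0_el2 value in x10 lands at
         * CTX_ACTLR_EL2 + 8).
         */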
        mrs     x9, actlr_el2
        mrs     x10, afsr0_el2
        stp     x9, x10, [x0, #CTX_ACTLR_EL2]

        mrs     x11, afsr1_el2
        mrs     x12, amair_el2
        stp     x11, x12, [x0, #CTX_AFSR1_EL2]

        mrs     x13, cnthctl_el2
        mrs     x14, cntvoff_el2
        stp     x13, x14, [x0, #CTX_CNTHCTL_EL2]

        mrs     x15, cptr_el2
        str     x15, [x0, #CTX_CPTR_EL2]

#if CTX_INCLUDE_AARCH32_REGS
        mrs     x16, dbgvcr32_el2
        str     x16, [x0, #CTX_DBGVCR32_EL2]
#endif

        mrs     x9, elr_el2
        mrs     x10, esr_el2
        stp     x9, x10, [x0, #CTX_ELR_EL2]

        mrs     x11, far_el2
        mrs     x12, hacr_el2
        stp     x11, x12, [x0, #CTX_FAR_EL2]

        mrs     x13, hcr_el2
        mrs     x14, hpfar_el2
        stp     x13, x14, [x0, #CTX_HCR_EL2]

        mrs     x15, hstr_el2
        mrs     x16, ICC_SRE_EL2
        stp     x15, x16, [x0, #CTX_HSTR_EL2]

        mrs     x9, ICH_HCR_EL2
        mrs     x10, ICH_VMCR_EL2
        stp     x9, x10, [x0, #CTX_ICH_HCR_EL2]

        mrs     x11, mair_el2
        mrs     x12, mdcr_el2
        stp     x11, x12, [x0, #CTX_MAIR_EL2]

#if ENABLE_SPE_FOR_LOWER_ELS
        mrs     x13, PMSCR_EL2
        str     x13, [x0, #CTX_PMSCR_EL2]
#endif
        mrs     x14, sctlr_el2
        str     x14, [x0, #CTX_SCTLR_EL2]

        mrs     x15, spsr_el2
        mrs     x16, sp_el2
        stp     x15, x16, [x0, #CTX_SPSR_EL2]

        mrs     x9, tcr_el2
        mrs     x10, tpidr_el2
        stp     x9, x10, [x0, #CTX_TCR_EL2]

        mrs     x11, ttbr0_el2
        mrs     x12, vbar_el2
        stp     x11, x12, [x0, #CTX_TTBR0_EL2]

        mrs     x13, vmpidr_el2
        mrs     x14, vpidr_el2
        stp     x13, x14, [x0, #CTX_VMPIDR_EL2]

        mrs     x15, vtcr_el2
        mrs     x16, vttbr_el2
        stp     x15, x16, [x0, #CTX_VTCR_EL2]

#if CTX_INCLUDE_MTE_REGS
        mrs     x9, TFSR_EL2
        str     x9, [x0, #CTX_TFSR_EL2]
#endif

#if ENABLE_MPAM_FOR_LOWER_ELS
        mrs     x10, MPAM2_EL2
        str     x10, [x0, #CTX_MPAM2_EL2]

        mrs     x11, MPAMHCR_EL2
        mrs     x12, MPAMVPM0_EL2
        stp     x11, x12, [x0, #CTX_MPAMHCR_EL2]

        mrs     x13, MPAMVPM1_EL2
        mrs     x14, MPAMVPM2_EL2
        stp     x13, x14, [x0, #CTX_MPAMVPM1_EL2]

        mrs     x15, MPAMVPM3_EL2
        mrs     x16, MPAMVPM4_EL2
        stp     x15, x16, [x0, #CTX_MPAMVPM3_EL2]

        mrs     x9, MPAMVPM5_EL2
        mrs     x10, MPAMVPM6_EL2
        stp     x9, x10, [x0, #CTX_MPAMVPM5_EL2]

        mrs     x11, MPAMVPM7_EL2
        mrs     x12, MPAMVPMV_EL2
        stp     x11, x12, [x0, #CTX_MPAMVPM7_EL2]
#endif

#if ARM_ARCH_AT_LEAST(8, 6)
        mrs     x13, HAFGRTR_EL2
        mrs     x14, HDFGRTR_EL2
        stp     x13, x14, [x0, #CTX_HAFGRTR_EL2]

        mrs     x15, HDFGWTR_EL2
        mrs     x16, HFGITR_EL2
        stp     x15, x16, [x0, #CTX_HDFGWTR_EL2]

        mrs     x9, HFGRTR_EL2
        mrs     x10, HFGWTR_EL2
        stp     x9, x10, [x0, #CTX_HFGRTR_EL2]

        mrs     x11, CNTPOFF_EL2
        str     x11, [x0, #CTX_CNTPOFF_EL2]
#endif

#if ARM_ARCH_AT_LEAST(8, 4)
        mrs     x12, contextidr_el2
        str     x12, [x0, #CTX_CONTEXTIDR_EL2]

#if CTX_INCLUDE_AARCH32_REGS
        mrs     x13, sder32_el2
        str     x13, [x0, #CTX_SDER32_EL2]
#endif
        mrs     x14, ttbr1_el2
        mrs     x15, vdisr_el2
        stp     x14, x15, [x0, #CTX_TTBR1_EL2]

#if CTX_INCLUDE_NEVE_REGS
        mrs     x16, vncr_el2
        str     x16, [x0, #CTX_VNCR_EL2]
#endif

        mrs     x9, vsesr_el2
        mrs     x10, vstcr_el2
        stp     x9, x10, [x0, #CTX_VSESR_EL2]

        mrs     x11, vsttbr_el2
        mrs     x12, TRFCR_EL2
        stp     x11, x12, [x0, #CTX_VSTTBR_EL2]
#endif

#if ARM_ARCH_AT_LEAST(8, 5)
        mrs     x13, scxtnum_el2
        str     x13, [x0, #CTX_SCXTNUM_EL2]
#endif

        ret
endfunc el2_sysregs_context_save
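
/*
 * Note: callers are expected to pass in x0 a pointer to the EL2
 * system register block of the CPU context; in the C world this is
 * presumably obtained via the get_el2_sysregs_ctx() accessor that
 * accompanies the context layout in context.h.
 */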


/* -----------------------------------------------------
 * The following function strictly follows the AArch64
 * PCS to use x9-x16 (temporary caller-saved registers)
 * to restore EL2 system register context. It assumes
 * that 'x0' is pointing to an 'el2_sys_regs' structure
 * from where the register context will be restored.
 *
 * The following registers are not restored:
 * AMEVCNTVOFF0<n>_EL2
 * AMEVCNTVOFF1<n>_EL2
 * ICH_AP0R<n>_EL2
 * ICH_AP1R<n>_EL2
 * ICH_LR<n>_EL2
 * -----------------------------------------------------
 */
func el2_sysregs_context_restore
        ldp     x9, x10, [x0, #CTX_ACTLR_EL2]
        msr     actlr_el2, x9
        msr     afsr0_el2, x10

        ldp     x11, x12, [x0, #CTX_AFSR1_EL2]
        msr     afsr1_el2, x11
        msr     amair_el2, x12

        ldp     x13, x14, [x0, #CTX_CNTHCTL_EL2]
        msr     cnthctl_el2, x13
        msr     cntvoff_el2, x14

        ldr     x15, [x0, #CTX_CPTR_EL2]
        msr     cptr_el2, x15

#if CTX_INCLUDE_AARCH32_REGS
        ldr     x16, [x0, #CTX_DBGVCR32_EL2]
        msr     dbgvcr32_el2, x16
#endif

        ldp     x9, x10, [x0, #CTX_ELR_EL2]
        msr     elr_el2, x9
        msr     esr_el2, x10

        ldp     x11, x12, [x0, #CTX_FAR_EL2]
        msr     far_el2, x11
        msr     hacr_el2, x12

        ldp     x13, x14, [x0, #CTX_HCR_EL2]
        msr     hcr_el2, x13
        msr     hpfar_el2, x14

        ldp     x15, x16, [x0, #CTX_HSTR_EL2]
        msr     hstr_el2, x15
        msr     ICC_SRE_EL2, x16

        ldp     x9, x10, [x0, #CTX_ICH_HCR_EL2]
        msr     ICH_HCR_EL2, x9
        msr     ICH_VMCR_EL2, x10

        ldp     x11, x12, [x0, #CTX_MAIR_EL2]
        msr     mair_el2, x11
        msr     mdcr_el2, x12

#if ENABLE_SPE_FOR_LOWER_ELS
        ldr     x13, [x0, #CTX_PMSCR_EL2]
        msr     PMSCR_EL2, x13
#endif
        ldr     x14, [x0, #CTX_SCTLR_EL2]
        msr     sctlr_el2, x14

        ldp     x15, x16, [x0, #CTX_SPSR_EL2]
        msr     spsr_el2, x15
        msr     sp_el2, x16

        ldp     x9, x10, [x0, #CTX_TCR_EL2]
        msr     tcr_el2, x9
        msr     tpidr_el2, x10

        ldp     x11, x12, [x0, #CTX_TTBR0_EL2]
        msr     ttbr0_el2, x11
        msr     vbar_el2, x12

        ldp     x13, x14, [x0, #CTX_VMPIDR_EL2]
        msr     vmpidr_el2, x13
        msr     vpidr_el2, x14

        ldp     x15, x16, [x0, #CTX_VTCR_EL2]
        msr     vtcr_el2, x15
        msr     vttbr_el2, x16

#if CTX_INCLUDE_MTE_REGS
        ldr     x9, [x0, #CTX_TFSR_EL2]
        msr     TFSR_EL2, x9
#endif

#if ENABLE_MPAM_FOR_LOWER_ELS
        ldr     x10, [x0, #CTX_MPAM2_EL2]
        msr     MPAM2_EL2, x10

        ldp     x11, x12, [x0, #CTX_MPAMHCR_EL2]
        msr     MPAMHCR_EL2, x11
        msr     MPAMVPM0_EL2, x12

        ldp     x13, x14, [x0, #CTX_MPAMVPM1_EL2]
        msr     MPAMVPM1_EL2, x13
        msr     MPAMVPM2_EL2, x14

        ldp     x15, x16, [x0, #CTX_MPAMVPM3_EL2]
        msr     MPAMVPM3_EL2, x15
        msr     MPAMVPM4_EL2, x16

        ldp     x9, x10, [x0, #CTX_MPAMVPM5_EL2]
        msr     MPAMVPM5_EL2, x9
        msr     MPAMVPM6_EL2, x10

        ldp     x11, x12, [x0, #CTX_MPAMVPM7_EL2]
        msr     MPAMVPM7_EL2, x11
        msr     MPAMVPMV_EL2, x12
#endif

#if ARM_ARCH_AT_LEAST(8, 6)
        ldp     x13, x14, [x0, #CTX_HAFGRTR_EL2]
        msr     HAFGRTR_EL2, x13
        msr     HDFGRTR_EL2, x14

        ldp     x15, x16, [x0, #CTX_HDFGWTR_EL2]
        msr     HDFGWTR_EL2, x15
        msr     HFGITR_EL2, x16

        ldp     x9, x10, [x0, #CTX_HFGRTR_EL2]
        msr     HFGRTR_EL2, x9
        msr     HFGWTR_EL2, x10

        ldr     x11, [x0, #CTX_CNTPOFF_EL2]
        msr     CNTPOFF_EL2, x11
#endif

#if ARM_ARCH_AT_LEAST(8, 4)
        ldr     x12, [x0, #CTX_CONTEXTIDR_EL2]
        msr     contextidr_el2, x12

#if CTX_INCLUDE_AARCH32_REGS
        ldr     x13, [x0, #CTX_SDER32_EL2]
        msr     sder32_el2, x13
#endif
        ldp     x14, x15, [x0, #CTX_TTBR1_EL2]
        msr     ttbr1_el2, x14
        msr     vdisr_el2, x15

#if CTX_INCLUDE_NEVE_REGS
        ldr     x16, [x0, #CTX_VNCR_EL2]
        msr     vncr_el2, x16
#endif

        ldp     x9, x10, [x0, #CTX_VSESR_EL2]
        msr     vsesr_el2, x9
        msr     vstcr_el2, x10

        ldp     x11, x12, [x0, #CTX_VSTTBR_EL2]
        msr     vsttbr_el2, x11
        msr     TRFCR_EL2, x12
#endif

#if ARM_ARCH_AT_LEAST(8, 5)
        ldr     x13, [x0, #CTX_SCXTNUM_EL2]
        msr     scxtnum_el2, x13
#endif

        ret
endfunc el2_sysregs_context_restore

#endif /* CTX_INCLUDE_EL2_REGS */

/* ------------------------------------------------------------
 * The following function strictly follows the AArch64 PCS to use
 * x9-x17 (temporary caller-saved registers) to save EL1 system
 * register context. It assumes that 'x0' is pointing to an
 * 'el1_sys_regs' structure where the register context will be
 * saved.
 * ------------------------------------------------------------
 */
func el1_sysregs_context_save

        mrs     x9, spsr_el1
        mrs     x10, elr_el1
        stp     x9, x10, [x0, #CTX_SPSR_EL1]

#if !ERRATA_SPECULATIVE_AT
        mrs     x15, sctlr_el1
        mrs     x16, tcr_el1
        stp     x15, x16, [x0, #CTX_SCTLR_EL1]
#endif
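        /*
         * Note: when ERRATA_SPECULATIVE_AT is enabled, sctlr_el1 and
         * tcr_el1 are deliberately skipped above; they are expected
         * to be saved and restored separately as part of the
         * speculative AT errata workaround.
         */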

        mrs     x17, cpacr_el1
        mrs     x9, csselr_el1
        stp     x17, x9, [x0, #CTX_CPACR_EL1]

        mrs     x10, sp_el1
        mrs     x11, esr_el1
        stp     x10, x11, [x0, #CTX_SP_EL1]

        mrs     x12, ttbr0_el1
        mrs     x13, ttbr1_el1
        stp     x12, x13, [x0, #CTX_TTBR0_EL1]

        mrs     x14, mair_el1
        mrs     x15, amair_el1
        stp     x14, x15, [x0, #CTX_MAIR_EL1]

        mrs     x16, actlr_el1
        mrs     x17, tpidr_el1
        stp     x16, x17, [x0, #CTX_ACTLR_EL1]

        mrs     x9, tpidr_el0
        mrs     x10, tpidrro_el0
        stp     x9, x10, [x0, #CTX_TPIDR_EL0]

        mrs     x13, par_el1
        mrs     x14, far_el1
        stp     x13, x14, [x0, #CTX_PAR_EL1]

        mrs     x15, afsr0_el1
        mrs     x16, afsr1_el1
        stp     x15, x16, [x0, #CTX_AFSR0_EL1]

        mrs     x17, contextidr_el1
        mrs     x9, vbar_el1
        stp     x17, x9, [x0, #CTX_CONTEXTIDR_EL1]

        /* Save AArch32 system registers if the build has instructed so */
#if CTX_INCLUDE_AARCH32_REGS
        mrs     x11, spsr_abt
        mrs     x12, spsr_und
        stp     x11, x12, [x0, #CTX_SPSR_ABT]

        mrs     x13, spsr_irq
        mrs     x14, spsr_fiq
        stp     x13, x14, [x0, #CTX_SPSR_IRQ]

        mrs     x15, dacr32_el2
        mrs     x16, ifsr32_el2
        stp     x15, x16, [x0, #CTX_DACR32_EL2]
#endif

        /* Save NS timer registers if the build has instructed so */
#if NS_TIMER_SWITCH
        mrs     x10, cntp_ctl_el0
        mrs     x11, cntp_cval_el0
        stp     x10, x11, [x0, #CTX_CNTP_CTL_EL0]

        mrs     x12, cntv_ctl_el0
        mrs     x13, cntv_cval_el0
        stp     x12, x13, [x0, #CTX_CNTV_CTL_EL0]

        mrs     x14, cntkctl_el1
        str     x14, [x0, #CTX_CNTKCTL_EL1]
#endif

        /* Save MTE system registers if the build has instructed so */
#if CTX_INCLUDE_MTE_REGS
        mrs     x15, TFSRE0_EL1
        mrs     x16, TFSR_EL1
        stp     x15, x16, [x0, #CTX_TFSRE0_EL1]

        mrs     x9, RGSR_EL1
        mrs     x10, GCR_EL1
        stp     x9, x10, [x0, #CTX_RGSR_EL1]
#endif

        ret
endfunc el1_sysregs_context_save

/* ------------------------------------------------------------
 * The following function strictly follows the AArch64 PCS to use
 * x9-x17 (temporary caller-saved registers) to restore EL1 system
 * register context. It assumes that 'x0' is pointing to an
 * 'el1_sys_regs' structure from where the register context will
 * be restored.
 * ------------------------------------------------------------
 */
func el1_sysregs_context_restore

        ldp     x9, x10, [x0, #CTX_SPSR_EL1]
        msr     spsr_el1, x9
        msr     elr_el1, x10

#if !ERRATA_SPECULATIVE_AT
        ldp     x15, x16, [x0, #CTX_SCTLR_EL1]
        msr     sctlr_el1, x15
        msr     tcr_el1, x16
#endif

        ldp     x17, x9, [x0, #CTX_CPACR_EL1]
        msr     cpacr_el1, x17
        msr     csselr_el1, x9

        ldp     x10, x11, [x0, #CTX_SP_EL1]
        msr     sp_el1, x10
        msr     esr_el1, x11

        ldp     x12, x13, [x0, #CTX_TTBR0_EL1]
        msr     ttbr0_el1, x12
        msr     ttbr1_el1, x13

        ldp     x14, x15, [x0, #CTX_MAIR_EL1]
        msr     mair_el1, x14
        msr     amair_el1, x15

        ldp     x16, x17, [x0, #CTX_ACTLR_EL1]
        msr     actlr_el1, x16
        msr     tpidr_el1, x17

        ldp     x9, x10, [x0, #CTX_TPIDR_EL0]
        msr     tpidr_el0, x9
        msr     tpidrro_el0, x10

        ldp     x13, x14, [x0, #CTX_PAR_EL1]
        msr     par_el1, x13
        msr     far_el1, x14

        ldp     x15, x16, [x0, #CTX_AFSR0_EL1]
        msr     afsr0_el1, x15
        msr     afsr1_el1, x16

        ldp     x17, x9, [x0, #CTX_CONTEXTIDR_EL1]
        msr     contextidr_el1, x17
        msr     vbar_el1, x9

        /* Restore AArch32 system registers if the build has instructed so */
#if CTX_INCLUDE_AARCH32_REGS
        ldp     x11, x12, [x0, #CTX_SPSR_ABT]
        msr     spsr_abt, x11
        msr     spsr_und, x12

        ldp     x13, x14, [x0, #CTX_SPSR_IRQ]
        msr     spsr_irq, x13
        msr     spsr_fiq, x14

        ldp     x15, x16, [x0, #CTX_DACR32_EL2]
        msr     dacr32_el2, x15
        msr     ifsr32_el2, x16
#endif
        /* Restore NS timer registers if the build has instructed so */
#if NS_TIMER_SWITCH
        ldp     x10, x11, [x0, #CTX_CNTP_CTL_EL0]
        msr     cntp_ctl_el0, x10
        msr     cntp_cval_el0, x11

        ldp     x12, x13, [x0, #CTX_CNTV_CTL_EL0]
        msr     cntv_ctl_el0, x12
        msr     cntv_cval_el0, x13

        ldr     x14, [x0, #CTX_CNTKCTL_EL1]
        msr     cntkctl_el1, x14
#endif
        /* Restore MTE system registers if the build has instructed so */
#if CTX_INCLUDE_MTE_REGS
        ldp     x11, x12, [x0, #CTX_TFSRE0_EL1]
        msr     TFSRE0_EL1, x11
        msr     TFSR_EL1, x12

        ldp     x13, x14, [x0, #CTX_RGSR_EL1]
        msr     RGSR_EL1, x13
        msr     GCR_EL1, x14
#endif

        /* No explicit ISB required here as ERET covers it */
        ret
endfunc el1_sysregs_context_restore

/* ------------------------------------------------------------
 * The following function follows the aapcs_64 strictly to use
 * x9-x17 (temporary caller-saved registers according to AArch64
 * PCS) to save floating point register context. It assumes that
 * 'x0' is pointing to a 'fp_regs' structure where the register
 * context will be saved.
 *
 * Access to VFP registers will trap if CPTR_EL3.TFP is set.
 * However currently we don't use VFP registers nor set traps in
 * Trusted Firmware, and assume it's cleared.
 *
 * TODO: Revisit when VFP is used in secure world
 * ------------------------------------------------------------
 */
#if CTX_INCLUDE_FPREGS
func fpregs_context_save
        stp     q0, q1, [x0, #CTX_FP_Q0]
        stp     q2, q3, [x0, #CTX_FP_Q2]
        stp     q4, q5, [x0, #CTX_FP_Q4]
        stp     q6, q7, [x0, #CTX_FP_Q6]
        stp     q8, q9, [x0, #CTX_FP_Q8]
        stp     q10, q11, [x0, #CTX_FP_Q10]
        stp     q12, q13, [x0, #CTX_FP_Q12]
        stp     q14, q15, [x0, #CTX_FP_Q14]
        stp     q16, q17, [x0, #CTX_FP_Q16]
        stp     q18, q19, [x0, #CTX_FP_Q18]
        stp     q20, q21, [x0, #CTX_FP_Q20]
        stp     q22, q23, [x0, #CTX_FP_Q22]
        stp     q24, q25, [x0, #CTX_FP_Q24]
        stp     q26, q27, [x0, #CTX_FP_Q26]
        stp     q28, q29, [x0, #CTX_FP_Q28]
        stp     q30, q31, [x0, #CTX_FP_Q30]
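        /*
         * Note: each Q-register pair occupies 32 bytes, so the
         * CTX_FP_Q<n> offsets used above advance in steps of 32.
         */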

        mrs     x9, fpsr
        str     x9, [x0, #CTX_FP_FPSR]

        mrs     x10, fpcr
        str     x10, [x0, #CTX_FP_FPCR]

#if CTX_INCLUDE_AARCH32_REGS
        mrs     x11, fpexc32_el2
        str     x11, [x0, #CTX_FP_FPEXC32_EL2]
#endif
        ret
endfunc fpregs_context_save

/* ------------------------------------------------------------
 * The following function follows the aapcs_64 strictly to use
 * x9-x17 (temporary caller-saved registers according to AArch64
 * PCS) to restore floating point register context. It assumes
 * that 'x0' is pointing to a 'fp_regs' structure from where the
 * register context will be restored.
 *
 * Access to VFP registers will trap if CPTR_EL3.TFP is set.
 * However currently we don't use VFP registers nor set traps in
 * Trusted Firmware, and assume it's cleared.
 *
 * TODO: Revisit when VFP is used in secure world
 * ------------------------------------------------------------
 */
func fpregs_context_restore
        ldp     q0, q1, [x0, #CTX_FP_Q0]
        ldp     q2, q3, [x0, #CTX_FP_Q2]
        ldp     q4, q5, [x0, #CTX_FP_Q4]
        ldp     q6, q7, [x0, #CTX_FP_Q6]
        ldp     q8, q9, [x0, #CTX_FP_Q8]
        ldp     q10, q11, [x0, #CTX_FP_Q10]
        ldp     q12, q13, [x0, #CTX_FP_Q12]
        ldp     q14, q15, [x0, #CTX_FP_Q14]
        ldp     q16, q17, [x0, #CTX_FP_Q16]
        ldp     q18, q19, [x0, #CTX_FP_Q18]
        ldp     q20, q21, [x0, #CTX_FP_Q20]
        ldp     q22, q23, [x0, #CTX_FP_Q22]
        ldp     q24, q25, [x0, #CTX_FP_Q24]
        ldp     q26, q27, [x0, #CTX_FP_Q26]
        ldp     q28, q29, [x0, #CTX_FP_Q28]
        ldp     q30, q31, [x0, #CTX_FP_Q30]

        ldr     x9, [x0, #CTX_FP_FPSR]
        msr     fpsr, x9

        ldr     x10, [x0, #CTX_FP_FPCR]
        msr     fpcr, x10

#if CTX_INCLUDE_AARCH32_REGS
        ldr     x11, [x0, #CTX_FP_FPEXC32_EL2]
        msr     fpexc32_el2, x11
#endif
        /*
         * No explicit ISB required here as ERET to
         * switch to secure EL1 or non-secure world
         * covers it
         */

        ret
endfunc fpregs_context_restore
#endif /* CTX_INCLUDE_FPREGS */

/* ------------------------------------------------------------
 * The following function is used to save and restore all the
 * general purpose and ARMv8.3-PAuth (if enabled) registers.
 * It also checks if the Secure Cycle Counter is not disabled
 * when ARMv8.5-PMU is implemented, and if called from
 * Non-secure state saves PMCR_EL0 and disables Cycle Counter.
 *
 * Ideally we would only save and restore the callee saved
 * registers when a world switch occurs but that type of
 * implementation is more complex. So currently we will always
 * save and restore these registers on entry and exit of EL3.
 * These are not macros to ensure their invocation fits within
 * the 32 instructions per exception vector.
 * clobbers: x18
 * ------------------------------------------------------------
 */
func save_gp_pmcr_pauth_regs                                  func save_gp_pmcr_pauth_regs
        stp     x0, x1, [sp, #CTX_GPREGS_OFFSET + CTX_GPRE            stp     x0, x1, [sp, #CTX_GPREGS_OFFSET + CTX_GPRE
        stp     x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPRE            stp     x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPRE
        stp     x4, x5, [sp, #CTX_GPREGS_OFFSET + CTX_GPRE            stp     x4, x5, [sp, #CTX_GPREGS_OFFSET + CTX_GPRE
        stp     x6, x7, [sp, #CTX_GPREGS_OFFSET + CTX_GPRE            stp     x6, x7, [sp, #CTX_GPREGS_OFFSET + CTX_GPRE
        stp     x8, x9, [sp, #CTX_GPREGS_OFFSET + CTX_GPRE            stp     x8, x9, [sp, #CTX_GPREGS_OFFSET + CTX_GPRE
        stp     x10, x11, [sp, #CTX_GPREGS_OFFSET + CTX_GP            stp     x10, x11, [sp, #CTX_GPREGS_OFFSET + CTX_GP
        stp     x12, x13, [sp, #CTX_GPREGS_OFFSET + CTX_GP            stp     x12, x13, [sp, #CTX_GPREGS_OFFSET + CTX_GP
        stp     x14, x15, [sp, #CTX_GPREGS_OFFSET + CTX_GP            stp     x14, x15, [sp, #CTX_GPREGS_OFFSET + CTX_GP
        stp     x16, x17, [sp, #CTX_GPREGS_OFFSET + CTX_GP            stp     x16, x17, [sp, #CTX_GPREGS_OFFSET + CTX_GP
        stp     x18, x19, [sp, #CTX_GPREGS_OFFSET + CTX_GP            stp     x18, x19, [sp, #CTX_GPREGS_OFFSET + CTX_GP
        stp     x20, x21, [sp, #CTX_GPREGS_OFFSET + CTX_GP            stp     x20, x21, [sp, #CTX_GPREGS_OFFSET + CTX_GP
        stp     x22, x23, [sp, #CTX_GPREGS_OFFSET + CTX_GP            stp     x22, x23, [sp, #CTX_GPREGS_OFFSET + CTX_GP
        stp     x24, x25, [sp, #CTX_GPREGS_OFFSET + CTX_GP            stp     x24, x25, [sp, #CTX_GPREGS_OFFSET + CTX_GP
        stp     x26, x27, [sp, #CTX_GPREGS_OFFSET + CTX_GP            stp     x26, x27, [sp, #CTX_GPREGS_OFFSET + CTX_GP
        stp     x28, x29, [sp, #CTX_GPREGS_OFFSET + CTX_GP            stp     x28, x29, [sp, #CTX_GPREGS_OFFSET + CTX_GP
        mrs     x18, sp_el0                                           mrs     x18, sp_el0
        str     x18, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_S            str     x18, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_S

        /* ----------------------------------------------------------
         * Check if the earlier initialization of MDCR_EL3.SCCD/MCCD
         * to 1 failed, meaning that FEAT_PMUv3p5/7 is not implemented
         * and PMCR_EL0 should be saved in the non-secure context.
         * ----------------------------------------------------------
         */
        mov_imm x10, (MDCR_SCCD_BIT | MDCR_MCCD_BIT)
        mrs     x9, mdcr_el3
        tst     x9, x10
        bne     1f

        /* Secure Cycle Counter is not disabled */
        mrs     x9, pmcr_el0

        /* Check caller's security state */
        mrs     x10, scr_el3
        tst     x10, #SCR_NS_BIT
        beq     2f

        /* Save PMCR_EL0 if called from Non-secure state */
        str     x9, [sp, #CTX_EL3STATE_OFFSET + CTX_PMCR_EL0]

        /* Disable cycle counter when event counting is prohibited */
2:      orr     x9, x9, #PMCR_EL0_DP_BIT
        msr     pmcr_el0, x9
        isb
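        /* ----------------------------------------------------------
         * PMCR_EL0.DP == 1 stops the cycle counter (PMCCNTR_EL0)
         * from counting in states where event counting is
         * prohibited; the ISB ensures the write takes effect before
         * execution continues.
         * ----------------------------------------------------------
         */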
1:
#if CTX_INCLUDE_PAUTH_REGS
        /* ----------------------------------------------------------
         * Save the ARMv8.3-PAuth keys as they are not banked
         * by exception level
         * ----------------------------------------------------------
         */
        add     x19, sp, #CTX_PAUTH_REGS_OFFSET

        mrs     x20, APIAKeyLo_EL1      /* x21:x20 = APIAKey */
        mrs     x21, APIAKeyHi_EL1
        mrs     x22, APIBKeyLo_EL1      /* x23:x22 = APIBKey */
        mrs     x23, APIBKeyHi_EL1
        mrs     x24, APDAKeyLo_EL1      /* x25:x24 = APDAKey */
        mrs     x25, APDAKeyHi_EL1
        mrs     x26, APDBKeyLo_EL1      /* x27:x26 = APDBKey */
        mrs     x27, APDBKeyHi_EL1
        mrs     x28, APGAKeyLo_EL1      /* x29:x28 = APGAKey */
        mrs     x29, APGAKeyHi_EL1
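
        /* ----------------------------------------------------------
         * Each PAuth key is a 128-bit value split across a Lo/Hi
         * system register pair; the context layout keeps the two
         * halves adjacent so each key can be saved with a single stp.
         * ----------------------------------------------------------
         */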

        stp     x20, x21, [x19, #CTX_PACIAKEY_LO]
        stp     x22, x23, [x19, #CTX_PACIBKEY_LO]
        stp     x24, x25, [x19, #CTX_PACDAKEY_LO]
        stp     x26, x27, [x19, #CTX_PACDBKEY_LO]
        stp     x28, x29, [x19, #CTX_PACGAKEY_LO]
#endif /* CTX_INCLUDE_PAUTH_REGS */

        ret
endfunc save_gp_pmcr_pauth_regs
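
/* ------------------------------------------------------------------
 * A minimal sketch of a typical call site (illustrative only; the
 * real vectors live in the EL3 runtime's exception handling code):
 * the caller must stash x30 itself, because the bl used to reach
 * this function clobbers it.
 *
 *         str     x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]
 *         bl      save_gp_pmcr_pauth_regs
 * ------------------------------------------------------------------
 */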

/* ------------------------------------------------------------------
 * This function restores ARMv8.3-PAuth (if enabled) and all general
 * purpose registers except x30 from the CPU context.
 * The x30 register must be explicitly restored by the caller.
 * ------------------------------------------------------------------
 */
func restore_gp_pmcr_pauth_regs
#if CTX_INCLUDE_PAUTH_REGS
        /* Restore the ARMv8.3 PAuth keys */
        add     x10, sp, #CTX_PAUTH_REGS_OFFSET

        ldp     x0, x1, [x10, #CTX_PACIAKEY_LO] /* x1:x0 = APIAKey */
        ldp     x2, x3, [x10, #CTX_PACIBKEY_LO] /* x3:x2 = APIBKey */
        ldp     x4, x5, [x10, #CTX_PACDAKEY_LO] /* x5:x4 = APDAKey */
        ldp     x6, x7, [x10, #CTX_PACDBKEY_LO] /* x7:x6 = APDBKey */
        ldp     x8, x9, [x10, #CTX_PACGAKEY_LO] /* x9:x8 = APGAKey */

        msr     APIAKeyLo_EL1, x0
        msr     APIAKeyHi_EL1, x1
        msr     APIBKeyLo_EL1, x2
        msr     APIBKeyHi_EL1, x3
        msr     APDAKeyLo_EL1, x4
        msr     APDAKeyHi_EL1, x5
        msr     APDBKeyLo_EL1, x6
        msr     APDBKeyHi_EL1, x7
        msr     APGAKeyLo_EL1, x8
        msr     APGAKeyHi_EL1, x9
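
        /* ----------------------------------------------------------
         * No explicit ISB is required after the key writes: the ERET
         * that eventually leaves EL3 is a context synchronization
         * event, so the new keys are guaranteed to be in effect
         * before any lower-EL instruction can use them.
         * ----------------------------------------------------------
         */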
#endif /* CTX_INCLUDE_PAUTH_REGS */

        /* ----------------------------------------------------------
         * Restore PMCR_EL0 when returning to Non-secure state if the
         * Secure Cycle Counter is not disabled in MDCR_EL3 when
         * ARMv8.5-PMU is implemented.
         * ----------------------------------------------------------
         */
        mrs     x0, scr_el3
        tst     x0, #SCR_NS_BIT
        beq     2f

        /* ----------------------------------------------------------
         * Back to Non-secure state.
         * Check if the earlier initialization of MDCR_EL3.SCCD/MCCD
         * to 1 failed, meaning that FEAT_PMUv3p5/7 is not implemented
         * and PMCR_EL0 should be restored from the non-secure
         * context.
         * ----------------------------------------------------------
         */
        mov_imm x1, (MDCR_SCCD_BIT | MDCR_MCCD_BIT)
        mrs     x0, mdcr_el3
        tst     x0, x1
        bne     2f
        ldr     x0, [sp, #CTX_EL3STATE_OFFSET + CTX_PMCR_EL0]
        msr     pmcr_el0, x0
2:
        ldp     x0, x1, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X0]
        ldp     x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
        ldp     x4, x5, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X4]
        ldp     x6, x7, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X6]
        ldp     x8, x9, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X8]
        ldp     x10, x11, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X10]
        ldp     x12, x13, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X12]
        ldp     x14, x15, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X14]
        ldp     x16, x17, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X16]
        ldp     x18, x19, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X18]
        ldp     x20, x21, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X20]
        ldp     x22, x23, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X22]
        ldp     x24, x25, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X24]
        ldp     x26, x27, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X26]
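
        /* ----------------------------------------------------------
         * x28 doubles as the scratch register for the SP_EL0
         * restore, so the x28/x29 pair is reloaded last, after
         * SP_EL0 has been written back.
         * ----------------------------------------------------------
         */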
        ldr     x28, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_SP_EL0]
        msr     sp_el0, x28
        ldp     x28, x29, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X28]
        ret
endfunc restore_gp_pmcr_pauth_regs
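
/* ------------------------------------------------------------------
 * x30 holds this function's own return address, so restoring it here
 * would break the ret above; callers reload it from the context
 * afterwards, as el3_exit does below:
 *
 *         bl      restore_gp_pmcr_pauth_regs
 *         ldr     x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]
 * ------------------------------------------------------------------
 */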

/*
 * In case of ERRATA_SPECULATIVE_AT, save SCTLR_EL1 and TCR_EL1
 * registers and update EL1 registers to disable stage1 and stage2
 * page table walk.
 */
func save_and_update_ptw_el1_sys_regs
        /* ----------------------------------------------------------
         * Save only the sctlr_el1 and tcr_el1 registers
         * ----------------------------------------------------------
         */
        mrs     x29, sctlr_el1
        str     x29, [sp, #(CTX_EL1_SYSREGS_OFFSET + CTX_SCTLR_EL1)]
        mrs     x29, tcr_el1
        str     x29, [sp, #(CTX_EL1_SYSREGS_OFFSET + CTX_TCR_EL1)]

        /* ----------------------------------------------------------
         * The two steps below must be performed in this order to
         * disable the page table walk for the lower ELs (EL1 and
         * EL0): the first step disables the stage 1 walk, and the
         * second forces the page table walker to honour the
         * TCR_EL1.EPDx bits when performing address translation.
         * The ISB ensures the CPU carries out the two steps in
         * order.
         *
         * 1. Set the TCR_EL1.EPDx bits to disable the page table
         *    walk by stage 1.
         * 2. Set the MMU enable bit to avoid identity mapping via
         *    stage 2 and force TCR_EL1.EPDx to be used by the page
         *    table walker.
         * ----------------------------------------------------------
         */
        orr     x29, x29, #(TCR_EPD0_BIT)
        orr     x29, x29, #(TCR_EPD1_BIT)
        msr     tcr_el1, x29
        isb
        mrs     x29, sctlr_el1
        orr     x29, x29, #SCTLR_M_BIT
        msr     sctlr_el1, x29
        isb

        ret
endfunc save_and_update_ptw_el1_sys_regs
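
/* ------------------------------------------------------------------
 * The saved values are written back by the restore_ptw_el1_sys_regs
 * macro invoked from el3_exit below. A minimal sketch of the inverse
 * sequence, assuming the macro simply mirrors the save (register
 * choices here are illustrative):
 *
 *         ldp     x28, x29, [sp, #(CTX_EL1_SYSREGS_OFFSET + CTX_SCTLR_EL1)]
 *         msr     sctlr_el1, x28   // restore the original MMU setting
 *         isb
 *         msr     tcr_el1, x29     // restore the EPDx bits last
 *         isb
 * ------------------------------------------------------------------
 */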

/* ------------------------------------------------------------------
 * This routine assumes that SP_EL3 is pointing to a valid context
 * structure from which the gp regs and other special registers can
 * be retrieved.
 * ------------------------------------------------------------------
 */
func el3_exit
#if ENABLE_ASSERTIONS
        /* el3_exit assumes SP_EL0 on entry */
        mrs     x17, spsel
        cmp     x17, #MODE_SP_EL0
        ASM_ASSERT(eq)
#endif

        /* ----------------------------------------------------------
         * Save the current SP_EL0, i.e. the EL3 runtime stack, which
         * will be used for handling the next SMC.
         * Then switch to SP_EL3.
         * ----------------------------------------------------------
         */
        mov     x17, sp
        msr     spsel, #MODE_SP_ELX
        str     x17, [sp, #CTX_EL3STATE_OFFSET + CTX_RUNTIME_SP]
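
        /* ----------------------------------------------------------
         * After the spsel switch, sp aliases SP_EL3, which points at
         * the current world's context structure; the runtime stack
         * pointer is parked in CTX_RUNTIME_SP so the next entry into
         * EL3 can pick it up again.
         * ----------------------------------------------------------
         */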

        /* ----------------------------------------------------------
         * Restore SPSR_EL3, ELR_EL3 and SCR_EL3 prior to ERET
         * ----------------------------------------------------------
         */
        ldr     x18, [sp, #CTX_EL3STATE_OFFSET + CTX_SCR_EL3]
        ldp     x16, x17, [sp, #CTX_EL3STATE_OFFSET + CTX_SPSR_EL3]
        msr     scr_el3, x18
        msr     spsr_el3, x16
        msr     elr_el3, x17
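
        /* ----------------------------------------------------------
         * SCR_EL3.NS selects the security state, while SPSR_EL3 and
         * ELR_EL3 supply the target execution state and the return
         * address that the forthcoming ERET will use.
         * ----------------------------------------------------------
         */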

#if IMAGE_BL31 && DYNAMIC_WORKAROUND_CVE_2018_3639
        /* ----------------------------------------------------------
         * Restore the mitigation state as it was on entry to EL3
         * ----------------------------------------------------------
         */
        ldr     x17, [sp, #CTX_CVE_2018_3639_OFFSET + CTX_CVE_2018_3639_DISABLE]
        cbz     x17, 1f
        blr     x17
1:
#endif
        restore_ptw_el1_sys_regs

        /* ----------------------------------------------------------
         * Restore the general purpose (including x30), PMCR_EL0 and
         * ARMv8.3-PAuth registers.
         * Exit EL3 via ERET to a lower exception level.
         * ----------------------------------------------------------
         */
        bl      restore_gp_pmcr_pauth_regs
        ldr     x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]

#if IMAGE_BL31 && RAS_EXTENSION
        /* ----------------------------------------------------------
         * Issue an Error Synchronization Barrier to synchronize
         * SErrors before exiting EL3. We're running with EAs
         * unmasked, so any synchronized errors would be taken
         * immediately; therefore there is no need to inspect the
         * DISR_EL1 register.
         * ----------------------------------------------------------
         */
        esb
#else
        dsb     sy
#endif
#ifdef IMAGE_BL31
        str     xzr, [sp, #CTX_EL3STATE_OFFSET + CTX_IS_IN_EL3]
#endif
        exception_return
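
        /* ----------------------------------------------------------
         * exception_return is an assembler macro rather than a bare
         * eret; assuming the usual TF-A pattern, it pairs the ERET
         * with a speculation barrier so that instructions following
         * the exception return cannot be executed speculatively.
         * ----------------------------------------------------------
         */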

endfunc el3_exit