
include/arch/aarch64/el3_common_macros.S vs plat/arm/board/el_max_common_macros.S

/*
 * Copyright (c) 2015-2021, ARM Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#ifndef EL_MAX_COMMON_MACROS_S
#define EL_MAX_COMMON_MACROS_S

#include <arch.h>
#include <asm_macros.S>
#include <context.h>
#include <lib/xlat_tables/xlat_tables_defs.h>
#include <platform_def.h>

        /*
         * Helper macro to initialise system registers at the highest EL we
         * support ("_el_max"). Typically this is EL3, but this platform
         * does not support EL3, so _el_max for it is 2.
         */
        .macro el_max_arch_init_common                                  \
                _el_max
        /* ---------------------------------------------------------------------
         * SCTLR_EL<max> has already been initialised - read current value
         * before modifying.
         *
         * SCTLR_EL<max>.I: Enable the instruction cache.
         *
         * SCTLR_EL<max>.SA: Enable Stack Alignment check. A SP alignment fault
         *  exception is generated if a load or store instruction executed at
         *  EL3 uses the SP as the base address and the SP is not aligned to a
         *  16-byte boundary.
         *
         * SCTLR_EL<max>.A: Enable Alignment fault checking. All instructions that
         *  load or store one or more registers have an alignment check that the
         *  address being accessed is aligned to the size of the data element(s)
         *  being accessed.
         * ---------------------------------------------------------------------
         */
        mov     x1, #(SCTLR_I_BIT | SCTLR_A_BIT | SCTLR_SA_BIT)
        mrs     x0, sctlr_el\_el_max
        orr     x0, x0, x1
        msr     sctlr_el\_el_max, x0
        isb
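
        /*
         * Expansion note (illustrative, not part of the original file): the
         * "_el_max" argument is pasted into the system register names above,
         * so instantiating the macro with 2 assembles as
         *
         *      mov     x1, #(SCTLR_I_BIT | SCTLR_A_BIT | SCTLR_SA_BIT)
         *      mrs     x0, sctlr_el2
         *      orr     x0, x0, x1
         *      msr     sctlr_el2, x0
         *      isb
         *
         * and with 3 it targets sctlr_el3 exactly as the original macro did.
         */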

#ifdef IMAGE_BL31
        /* ---------------------------------------------------------------------
         * Initialise the per-cpu cache pointer to the CPU.
         * This is done early to enable crash reporting to have access to crash
         * stack. Since crash reporting depends on cpu_data to report the
         * unhandled exception, not doing so can lead to recursive exceptions
         * due to a NULL TPIDR_EL<max>.
         * ---------------------------------------------------------------------
         */
        bl      init_cpu_data_ptr
#endif /* IMAGE_BL31 */

#ifndef NO_EL3
        /* ---------------------------------------------------------------------
         * Initialise SCR_EL<max>, setting all fields rather than relying on hw.
         * All fields are architecturally UNKNOWN on reset. The following fields
         * do not change during the TF lifetime. The remaining fields are set to
         * zero here but are updated ahead of transitioning to a lower EL in the
         * function cm_init_context_common().
         *
         * SCR_EL<max>.TWE: Set to zero so that execution of WFE instructions at
         *  EL2, EL1 and EL0 are not trapped to EL<max>.
         *
         * SCR_EL<max>.TWI: Set to zero so that execution of WFI instructions at
         *  EL2, EL1 and EL0 are not trapped to EL<max>.
         *
         * SCR_EL<max>.SIF: Set to one to disable instruction fetches from
         *  Non-secure memory.
         *
         * SCR_EL<max>.SMD: Set to zero to enable SMC calls at EL1 and above,
         *  from both Security states and both Execution states.
         *
         * SCR_EL<max>.EA: Set to one to route External Aborts and SError
         *  Interrupts to EL<max> when executing at any EL.
         *
         * SCR_EL<max>.{API,APK}: For Armv8.3 pointer authentication feature,
         * disable traps to EL<max> when accessing key registers or using
         * pointer authentication instructions from lower ELs.
         * ---------------------------------------------------------------------
         */
        mov_imm x0, ((SCR_RESET_VAL | SCR_EA_BIT | SCR_SIF_BIT) \
                        & ~(SCR_TWE_BIT | SCR_TWI_BIT | SCR_SMD_BIT))
#if CTX_INCLUDE_PAUTH_REGS
        /*
         * If the pointer authentication registers are saved during world
         * switches, enable pointer authentication everywhere, as it is safe to
         * do so.
         */
        orr     x0, x0, #(SCR_API_BIT | SCR_APK_BIT)
#endif  /* CTX_INCLUDE_PAUTH_REGS */
        msr     scr_el3, x0
#endif  /* ndef NO_EL3 */
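
        /*
         * Illustrative note, not part of the original file: the NO_EL3 guard
         * above is expected to come from the platform. A hypothetical
         * platform_def.h for a part whose highest implemented Exception level
         * is EL2 could simply carry
         *
         *      #define NO_EL3
         *
         * so that the SCR_EL3 programming above is compiled out, while the
         * rest of this macro is instantiated with "_el_max" set to 2.
         */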

        /* ---------------------------------------------------------------------
         * Initialise MDCR_EL<max>, setting all fields rather than relying on
         * hw. Some fields are architecturally UNKNOWN on reset.
         *
         * MDCR_EL<max>.SDD: Set to one to disable AArch64 Secure self-hosted
         *  debug. Debug exceptions, other than Breakpoint Instruction
         *  exceptions, are disabled from all ELs in Secure state.
         *
         * MDCR_EL<max>.TDOSA: Set to zero so that EL2 and EL1 System register
         *  access to the powerdown debug registers do not trap to EL<max>.
         *
         * MDCR_EL<max>.TDA: Set to zero to allow EL0, EL1 and EL2 access to the
         *  debug registers, other than those registers that are controlled by
         *  MDCR_EL<max>.TDOSA.
         *
         * MDCR_EL<max>.TPM: Set to zero so that EL0, EL1, and EL2 System
         *  register accesses to all Performance Monitors registers do not trap
         *  to EL<max>.
         *
         * MDCR_EL<max>.SCCD: Set to one so that cycle counting by PMCCNTR_EL0
         *  is prohibited in Secure state. This bit is RES0 in versions of the
         *  architecture with FEAT_PMUv3p5 not implemented, setting it to 1
         *  doesn't have any effect on them.
         *
         * MDCR_EL<max>.MCCD: Set to one so that cycle counting by PMCCNTR_EL0
         *  is prohibited in EL<max>. This bit is RES0 in versions of the
         *  architecture with FEAT_PMUv3p7 not implemented, setting it to 1
         *  doesn't have any effect on them.
         *
         * MDCR_EL<max>.SPME: Set to zero so that event counting by the program-
         *  mable counters PMEVCNTR<n>_EL0 is prohibited in Secure state. If
         *  ARMv8.2 Debug is not implemented this bit does not have any effect
         *  on the counters unless there is support for the implementation
         *  defined authentication interface
         *  ExternalSecureNoninvasiveDebugEnabled().
         * ---------------------------------------------------------------------
         */
        mov_imm x0, ((MDCR_EL3_RESET_VAL | MDCR_SDD_BIT | \
                      MDCR_SPD32(MDCR_SPD32_DISABLE) | MDCR_SCCD_BIT | \
                      MDCR_MCCD_BIT) & ~(MDCR_SPME_BIT | MDCR_TDOSA_BIT | \
                      MDCR_TDA_BIT | MDCR_TPM_BIT))

        msr     mdcr_el\_el_max, x0

        /* ---------------------------------------------------------------------
         * Initialise PMCR_EL0 setting all fields rather than relying
         * on hw. Some fields are architecturally UNKNOWN on reset.
         *
         * PMCR_EL0.LP: Set to one so that event counter overflow, that
         *  is recorded in PMOVSCLR_EL0[0-30], occurs on the increment
         *  that changes PMEVCNTR<n>_EL0[63] from 1 to 0, when ARMv8.5-PMU
         *  is implemented. This bit is RES0 in versions of the architecture
         *  earlier than ARMv8.5, setting it to 1 doesn't have any effect
         *  on them.
         *
         * PMCR_EL0.LC: Set to one so that cycle counter overflow, that
         *  is recorded in PMOVSCLR_EL0[31], occurs on the increment
         *  that changes PMCCNTR_EL0[63] from 1 to 0.
         *
         * PMCR_EL0.DP: Set to one so that the cycle counter,
         *  PMCCNTR_EL0 does not count when event counting is prohibited.
         *
         * PMCR_EL0.X: Set to zero to disable export of events.
         *
         * PMCR_EL0.D: Set to zero so that, when enabled, PMCCNTR_EL0
         *  counts on every clock cycle.
         * ---------------------------------------------------------------------
         */
        mov_imm x0, ((PMCR_EL0_RESET_VAL | PMCR_EL0_LP_BIT | \
                      PMCR_EL0_LC_BIT | PMCR_EL0_DP_BIT) & \
                    ~(PMCR_EL0_X_BIT | PMCR_EL0_D_BIT))

        msr     pmcr_el0, x0

        /* ---------------------------------------------------------------------
         * Enable External Aborts and SError Interrupts now that the exception
         * vectors have been setup.
         * ---------------------------------------------------------------------
         */
        msr     daifclr, #DAIF_ABT_BIT

        /* ---------------------------------------------------------------------
         * Initialise CPTR_EL3, setting all fields rather than relying on hw.
         * All fields are architecturally UNKNOWN on reset.
         *
         * CPTR_EL3.TCPAC: Set to zero so that any accesses to CPACR_EL1,
         *  CPTR_EL2, CPACR, or HCPTR do not trap to EL3.
         *
         * CPTR_EL3.TTA: Set to zero so that System register accesses to the
         *  trace registers do not trap to EL3.
         *
         * CPTR_EL3.TFP: Set to zero so that accesses to the V- registers
         *  by Advanced SIMD, floating-point or SVE instructions (if implemented)
         *  do not trap to EL3.
         */
        mov_imm x0, (CPTR_EL3_RESET_VAL & ~(TCPAC_BIT | TTA_BIT | TFP_BIT))
        msr     cptr_el\_el_max, x0

        /*
         * If Data Independent Timing (DIT) functionality is implemented,
         * always enable DIT in EL3
         */
        mrs     x0, id_aa64pfr0_el1
        ubfx    x0, x0, #ID_AA64PFR0_DIT_SHIFT, #ID_AA64PFR0_DIT_LENGTH
        cmp     x0, #ID_AA64PFR0_DIT_SUPPORTED
        bne     1f
        mov     x0, #DIT_BIT
        msr     DIT, x0
1:
        .endm
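
        /*
         * Illustrative instantiations (assumed, not taken from the original
         * sources): an image whose highest implemented EL is EL3 versus an
         * EL2-only image would use the macro as follows.
         */
#if 0
        el_max_arch_init_common 3       /* programs sctlr_el3, mdcr_el3, cptr_el3 */
        el_max_arch_init_common 2       /* programs sctlr_el2, mdcr_el2, cptr_el2 */
#endif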

/* -----------------------------------------------------------------------------
 * This is the super set of actions that need to be performed during a cold boot
 * or a warm boot in EL3. This code is shared by BL1 and BL31.
 *
 * This macro will always perform reset handling, architectural initialisations
 * and stack setup. The rest of the actions are optional because they might not
 * be needed, depending on the context in which this macro is called. This is
 * why this macro is parameterised; each parameter allows to enable/disable
 * some actions.
 *
 *  _init_sctlr:
 *      Whether the macro needs to initialise SCTLR_EL3, including configuring
 *      the endianness of data accesses.
 *
 *  _warm_boot_mailbox:
 *      Whether the macro needs to detect the type of boot (cold/warm). The
 *      detection is based on the platform entrypoint address: if it is zero
 *      then it is a cold boot, otherwise it is a warm boot. In the latter case,
 *      this macro jumps on the platform entrypoint address.
 *
 *  _secondary_cold_boot:
 *      Whether the macro needs to identify the CPU that is calling it: primary
 *      CPU or secondary CPU. The primary CPU will be allowed to carry on with
 *      the platform initialisations, while the secondaries will be put in a
 *      platform-specific state in the meantime.
 *
 *      If the caller knows this macro will only be called by the primary CPU
 *      then this parameter can be defined to 0 to skip this step.
 *
 * _init_memory:
 *      Whether the macro needs to initialise the memory.
 *
 * _init_c_runtime:
 *      Whether the macro needs to initialise the C runtime environment.
 *
 * _exception_vectors:
 *      Address of the exception vectors to program in the VBAR_EL3 register.
 *
 * _pie_fixup_size:
 *      Size of memory region to fixup Global Descriptor Table (GDT).
 *
 *      A non-zero value is expected when firmware needs GDT to be fixed-up.
 *
 * _el_max:
 *      The highest implemented Exception level; it selects which EL's system
 *      registers (SCTLR, MDCR, CPTR, VBAR) this macro and
 *      el_max_arch_init_common program.
 *
 * An illustrative invocation is sketched after the macro definition below.
 * -----------------------------------------------------------------------------
 */
        .macro el_max_entrypoint_common                                 \
                _init_sctlr, _warm_boot_mailbox, _secondary_cold_boot,  \
                _init_memory, _init_c_runtime, _exception_vectors,      \
                _pie_fixup_size, _el_max

        .if \_init_sctlr
                /* -------------------------------------------------------------
                 * This is the initialisation of SCTLR_EL3 and so must ensure
                 * that all fields are explicitly set rather than relying on hw.
                 * Some fields reset to an IMPLEMENTATION DEFINED value and
                 * others are architecturally UNKNOWN on reset.
                 *
                 * Although typically SCTLR_EL3 is thus initialised, this macro
                 * includes the option to initialize any other EL's SCTLR
                 * instead, as specified in the "_el_max" parameter.
                 *
                 * SCTLR.EE: Set the CPU endianness before doing anything that
                 *  might involve memory reads or writes. Set to zero to select
                 *  Little Endian.
                 *
                 * SCTLR_EL3.WXN: For the EL3 translation regime, this field can
                 *  force all memory regions that are writeable to be treated as
                 *  XN (Execute-never). Set to zero so that this control has no
                 *  effect on memory access permissions.
                 *
                 * SCTLR_EL3.SA: Set to zero to disable Stack Alignment check.
                 *
                 * SCTLR_EL3.A: Set to zero to disable Alignment fault checking.
                 *
                 * SCTLR.DSSBS: Set to zero to disable speculation store bypass
                 *  safe behaviour upon exception entry to EL3.
                 * -------------------------------------------------------------
                 */
                mov_imm x0, (SCTLR_RESET_VAL & ~(SCTLR_EE_BIT | SCTLR_WXN_BIT \
                                | SCTLR_SA_BIT | SCTLR_A_BIT | SCTLR_DSSBS_BIT))
                msr     sctlr_el\_el_max, x0
                isb
        .endif /* _init_sctlr */

#if DISABLE_MTPMU
                bl      mtpmu_disable
#endif

        .if \_warm_boot_mailbox
                /* -------------------------------------------------------------
                 * This code will be executed for both warm and cold resets.
                 * Now is the time to distinguish between the two.
                 * Query the platform entrypoint address and if it is not zero
                 * then it means it is a warm boot so jump to this address.
                 * -------------------------------------------------------------
                 */
                bl      plat_get_my_entrypoint
                cbz     x0, do_cold_boot
                br      x0

        do_cold_boot:
        .endif /* _warm_boot_mailbox */
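
                /* -------------------------------------------------------------
                 * Illustrative sketch, not part of the original file: a
                 * minimal plat_get_my_entrypoint() for a platform that keeps a
                 * warm-boot mailbox in memory could look like
                 *
                 *      func plat_get_my_entrypoint
                 *              mov_imm x0, PLAT_MAILBOX_BASE
                 *              ldr     x0, [x0]
                 *              ret
                 *      endfunc plat_get_my_entrypoint
                 *
                 * returning zero until a warm-boot entrypoint has been written
                 * to the mailbox. PLAT_MAILBOX_BASE is a hypothetical platform
                 * constant, not one defined by this file.
                 * -------------------------------------------------------------
                 */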

        .if \_pie_fixup_size
#if ENABLE_PIE
                /*
                 * ------------------------------------------------------------
                 * If PIE is enabled fixup the Global descriptor Table only
                 * once during primary core cold boot path.
                 *
                 * Compile time base address, required for fixup, is calculated
                 * using "pie_fixup" label present within first page.
                 * ------------------------------------------------------------
                 */
        pie_fixup:
                ldr     x0, =pie_fixup
                and     x0, x0, #~(PAGE_SIZE_MASK)
                mov_imm x1, \_pie_fixup_size
                add     x1, x1, x0
                bl      fixup_gdt_reloc
#endif /* ENABLE_PIE */
        .endif /* _pie_fixup_size */

        /* ---------------------------------------------------------------------
         * Set the exception vectors.
         * ---------------------------------------------------------------------
         */
        adr     x0, \_exception_vectors
        msr     vbar_el\_el_max, x0
        isb

        /* ---------------------------------------------------------------------
         * It is a cold boot.
         * Perform any processor specific actions upon reset e.g. cache, TLB
         * invalidations etc.
         * ---------------------------------------------------------------------
         */
        bl      reset_handler

        el_max_arch_init_common \_el_max

        .if \_secondary_cold_boot
                /* -------------------------------------------------------------
                 * Check if this is a primary or secondary CPU cold boot.
                 * The primary CPU will set up the platform while the
                 * secondaries are placed in a platform-specific state until the
                 * primary CPU performs the necessary actions to bring them out
                 * of that state and allows entry into the OS.
                 * -------------------------------------------------------------
                 */
                bl      plat_is_my_cpu_primary
                cbnz    w0, do_primary_cold_boot

                /* This is a cold boot on a secondary CPU */
                bl      plat_secondary_cold_boot_setup
                /* plat_secondary_cold_boot_setup() is not supposed to return */
                bl      el\_el_max\()_panic
        do_primary_cold_boot:
        .endif /* _secondary_cold_boot */

        /* ---------------------------------------------------------------------
         * Initialize memory now. Secondary CPU initialization won't get to this
         * point.
         * ---------------------------------------------------------------------
         */

        .if \_init_memory
                bl      platform_mem_init
        .endif /* _init_memory */

        /* ---------------------------------------------------------------------
         * Init C runtime environment:
         *   - Zero-initialise the NOBITS sections. There are 2 of them:
         *       - the .bss section;
         *       - the coherent memory section (if any).
         *   - Relocate the data section from ROM to RAM, if required.
         * ---------------------------------------------------------------------
         */
        .if \_init_c_runtime
#if defined(IMAGE_BL31) || (defined(IMAGE_BL2) && BL2_AT_EL3)
                /* -------------------------------------------------------------
                 * Invalidate the RW memory used by the BL31 image. This
                 * includes the data and NOBITS sections. This is done to
                 * safeguard against possible corruption of this memory by
                 * dirty cache lines in a system cache as a result of use by
                 * an earlier boot loader stage.
                 * -------------------------------------------------------------
                 */
                adrp    x0, __RW_START__
                add     x0, x0, :lo12:__RW_START__
                adrp    x1, __RW_END__
                add     x1, x1, :lo12:__RW_END__
                sub     x1, x1, x0
                bl      inv_dcache_range
#if defined(IMAGE_BL31) && SEPARATE_NOBITS_REGION
                adrp    x0, __NOBITS_START__
                add     x0, x0, :lo12:__NOBITS_START__
                adrp    x1, __NOBITS_END__
                add     x1, x1, :lo12:__NOBITS_END__
                sub     x1, x1, x0
                bl      inv_dcache_range
#endif
#endif
                adrp    x0, __BSS_START__
                add     x0, x0, :lo12:__BSS_START__

                adrp    x1, __BSS_END__
                add     x1, x1, :lo12:__BSS_END__
                sub     x1, x1, x0
                bl      zeromem

#if USE_COHERENT_MEM
                adrp    x0, __COHERENT_RAM_START__
                add     x0, x0, :lo12:__COHERENT_RAM_START__
                adrp    x1, __COHERENT_RAM_END_UNALIGNED__
                add     x1, x1, :lo12: __COHERENT_RAM_END_UNALIGNED__
                sub     x1, x1, x0
                bl      zeromem
#endif

#if defined(IMAGE_BL1) || (defined(IMAGE_BL2) && BL2_AT_EL3)
                adrp    x0, __DATA_RAM_START__
                add     x0, x0, :lo12:__DATA_RAM_START__
                adrp    x1, __DATA_ROM_START__
                add     x1, x1, :lo12:__DATA_ROM_START__
                adrp    x2, __DATA_RAM_END__
                add     x2, x2, :lo12:__DATA_RAM_END__
                sub     x2, x2, x0
                bl      memcpy16
#endif
        .endif /* _init_c_runtime */

        /* ---------------------------------------------------------------------
         * Use SP_EL0 for the C runtime stack.
         * ---------------------------------------------------------------------
         */
        msr     spsel, #0

        /* ---------------------------------------------------------------------
         * Allocate a stack whose memory will be marked as Normal-IS-WBWA when
         * the MMU is enabled. There is no risk of reading stale stack memory
         * after enabling the MMU as only the primary CPU is running at the
         * moment.
         * ---------------------------------------------------------------------
         */
        bl      plat_set_my_stack

#if STACK_PROTECTOR_ENABLED
        .if \_init_c_runtime
        bl      update_stack_protector_canary
        .endif /* _init_c_runtime */
#endif
        .endm
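
        /*
         * Illustrative invocation, an assumption rather than part of the
         * original sources: an entrypoint for an image whose highest
         * implemented EL is EL3 could instantiate the macro in the same style
         * the original el3_entrypoint_common is used, with the extra
         * "_el_max" argument appended, for example:
         */
#if 0
        el_max_entrypoint_common                                        \
                _init_sctlr=1                                           \
                _warm_boot_mailbox=!PROGRAMMABLE_RESET_ADDRESS          \
                _secondary_cold_boot=!COLD_BOOT_SINGLE_CPU              \
                _init_memory=1                                          \
                _init_c_runtime=1                                       \
                _exception_vectors=runtime_exceptions                   \
                _pie_fixup_size=BL31_LIMIT - BL31_BASE                  \
                _el_max=3
#endif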

        .macro  apply_at_speculative_wa
#if ERRATA_SPECULATIVE_AT
        /*
         * Explicitly save x30 so as to free up a register and to enable
         * branching and also, save x29 which will be used in the called
         * function
         */
        stp     x29, x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X29]
        bl      save_and_update_ptw_el1_sys_regs
        ldp     x29, x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X29]
#endif
        .endm

        .macro  restore_ptw_el1_sys_regs
#if ERRATA_SPECULATIVE_AT
        /* -----------------------------------------------------------
         * In case of ERRATA_SPECULATIVE_AT, must follow below order
         * to ensure that page table walk is not enabled until
         * restoration of all EL1 system registers. TCR_EL1 register
         * should be updated at the end which restores previous page
         * table walk setting of stage1 i.e.(TCR_EL1.EPDx) bits. ISB
         * ensures that CPU does below steps in order.
         *
         * 1. Ensure all other system registers are written before
         *    updating SCTLR_EL1 using ISB.
         * 2. Restore SCTLR_EL1 register.
         * 3. Ensure SCTLR_EL1 written successfully using ISB.
         * 4. Restore TCR_EL1 register.
         * -----------------------------------------------------------
         */
        isb
        ldp     x28, x29, [sp, #CTX_EL1_SYSREGS_OFFSET + CTX_SCTLR_EL1]
        msr     sctlr_el1, x28
        isb
        msr     tcr_el1, x29
#endif
        .endm
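
        /*
         * Usage sketch, an assumption rather than original text: the two
         * macros above are meant to bracket a trip through the highest EL's
         * exception handlers, roughly
         *
         *      vector_entry sync_exception_aarch64
         *              apply_at_speculative_wa
         *              ...
         *
         * on entry, and, on the exit path before the final ERET,
         *
         *              restore_ptw_el1_sys_regs
         *              exception_return
         *
         * so that stage 1 page table walks stay disabled while the EL1 system
         * registers are being restored.
         */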

#endif /* EL_MAX_COMMON_MACROS_S */
Last Author: garymorrison-arm
Last Edited: Jul 2 2021, 11:03 PM