/*
 * Copyright (c) 2013-2018, ARM Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <arch.h>
#include <asm_macros.S>
#include <bl_common.h>
11
12 .globl bl2_entrypoint
13
14
Achin Gupta4f6ad662013-10-25 09:08:21 +010015
Andrew Thoelke38bde412014-03-18 13:46:55 +000016func bl2_entrypoint
Achin Gupta4f6ad662013-10-25 09:08:21 +010017 /*---------------------------------------------
Soby Mathew73308d02018-01-09 14:36:14 +000018 * Save arguments x0 - x3 from BL1 for future
19 * use.
Achin Gupta4f6ad662013-10-25 09:08:21 +010020 * ---------------------------------------------
Antonio Nino Diaz1f21bcf2016-02-01 13:57:25 +000021 */
Soby Mathew73308d02018-01-09 14:36:14 +000022 mov x20, x0
23 mov x21, x1
24 mov x22, x2
25 mov x23, x3
Achin Gupta4f6ad662013-10-25 09:08:21 +010026
27 /* ---------------------------------------------
Sandrine Bailleuxc10bd2c2013-11-12 16:41:16 +000028 * Set the exception vector to something sane.
29 * ---------------------------------------------
30 */
31 adr x0, early_exceptions
32 msr vbar_el1, x0
Achin Guptaed1744e2014-08-04 23:13:10 +010033 isb
34
35 /* ---------------------------------------------
36 * Enable the SError interrupt now that the
37 * exception vectors have been setup.
38 * ---------------------------------------------
39 */
40 msr daifclr, #DAIF_ABT_BIT
Sandrine Bailleuxc10bd2c2013-11-12 16:41:16 +000041
42 /* ---------------------------------------------
Achin Gupta9f098352014-07-18 18:38:28 +010043 * Enable the instruction cache, stack pointer
44 * and data access alignment checks
Sandrine Bailleuxc10bd2c2013-11-12 16:41:16 +000045 * ---------------------------------------------
46 */
Achin Gupta9f098352014-07-18 18:38:28 +010047 mov x1, #(SCTLR_I_BIT | SCTLR_A_BIT | SCTLR_SA_BIT)
Sandrine Bailleuxc10bd2c2013-11-12 16:41:16 +000048 mrs x0, sctlr_el1
Achin Gupta9f098352014-07-18 18:38:28 +010049 orr x0, x0, x1
Sandrine Bailleuxc10bd2c2013-11-12 16:41:16 +000050 msr sctlr_el1, x0
Sandrine Bailleuxc10bd2c2013-11-12 16:41:16 +000051 isb
52
Sandrine Bailleux65f546a2013-11-28 09:43:06 +000053 /* ---------------------------------------------
Achin Guptae9c4a642015-09-11 16:03:13 +010054 * Invalidate the RW memory used by the BL2
55 * image. This includes the data and NOBITS
56 * sections. This is done to safeguard against
57 * possible corruption of this memory by dirty
58 * cache lines in a system cache as a result of
59 * use by an earlier boot loader stage.
60 * ---------------------------------------------
61 */
62 adr x0, __RW_START__
63 adr x1, __RW_END__
64 sub x1, x1, x0
65 bl inv_dcache_range
66
67 /* ---------------------------------------------
Sandrine Bailleux65f546a2013-11-28 09:43:06 +000068 * Zero out NOBITS sections. There are 2 of them:
69 * - the .bss section;
70 * - the coherent memory section.
71 * ---------------------------------------------
72 */
73 ldr x0, =__BSS_START__
74 ldr x1, =__BSS_SIZE__
Douglas Raillard21362a92016-12-02 13:51:54 +000075 bl zeromem
Sandrine Bailleux65f546a2013-11-28 09:43:06 +000076
Soby Mathew2ae20432015-01-08 18:02:44 +000077#if USE_COHERENT_MEM
Sandrine Bailleux65f546a2013-11-28 09:43:06 +000078 ldr x0, =__COHERENT_RAM_START__
79 ldr x1, =__COHERENT_RAM_UNALIGNED_SIZE__
Douglas Raillard21362a92016-12-02 13:51:54 +000080 bl zeromem
Soby Mathew2ae20432015-01-08 18:02:44 +000081#endif
Sandrine Bailleux65f546a2013-11-28 09:43:06 +000082
Achin Gupta4f6ad662013-10-25 09:08:21 +010083 /* --------------------------------------------
Achin Guptaf4a97092014-06-25 19:26:22 +010084 * Allocate a stack whose memory will be marked
85 * as Normal-IS-WBWA when the MMU is enabled.
86 * There is no risk of reading stale stack
87 * memory after enabling the MMU as only the
88 * primary cpu is running at the moment.
Achin Gupta4f6ad662013-10-25 09:08:21 +010089 * --------------------------------------------
90 */
Soby Mathew3700a922015-07-13 11:21:11 +010091 bl plat_set_my_stack
Achin Gupta4f6ad662013-10-25 09:08:21 +010092
93 /* ---------------------------------------------
Douglas Raillard306593d2017-02-24 18:14:15 +000094 * Initialize the stack protector canary before
95 * any C code is called.
96 * ---------------------------------------------
97 */
98#if STACK_PROTECTOR_ENABLED
99 bl update_stack_protector_canary
100#endif
101
102 /* ---------------------------------------------
Achin Gupta4f6ad662013-10-25 09:08:21 +0100103 * Perform early platform setup & platform
104 * specific early arch. setup e.g. mmu setup
105 * ---------------------------------------------
106 */
Yatharth Kochar57d334c2015-10-29 12:47:02 +0000107 mov x0, x20
Soby Mathew73308d02018-01-09 14:36:14 +0000108 mov x1, x21
109 mov x2, x22
110 mov x3, x23
111 bl bl2_early_platform_setup2
112
Achin Gupta4f6ad662013-10-25 09:08:21 +0100113 bl bl2_plat_arch_setup
114
115 /* ---------------------------------------------
Achin Gupta4f6ad662013-10-25 09:08:21 +0100116 * Jump to main function.
117 * ---------------------------------------------
118 */
119 bl bl2_main
Antonio Nino Diaz1f21bcf2016-02-01 13:57:25 +0000120
121 /* ---------------------------------------------
122 * Should never reach this point.
123 * ---------------------------------------------
124 */
Jeenu Viswambharan68aef102016-11-30 15:21:11 +0000125 no_ret plat_panic_handler
Antonio Nino Diaz1f21bcf2016-02-01 13:57:25 +0000126
Kévin Petita877c252015-03-24 14:03:57 +0000127endfunc bl2_entrypoint