/*
 * Copyright (c) 2016-2023, Arm Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <common/bl_common.ld.h>
#include <lib/xlat_tables/xlat_tables_defs.h>

OUTPUT_FORMAT(elf32-littlearm)
OUTPUT_ARCH(arm)
ENTRY(sp_min_vector_table)

MEMORY {
    RAM (rwx): ORIGIN = BL32_BASE, LENGTH = BL32_LIMIT - BL32_BASE
}
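
/*
 * All output sections below are placed into this single RAM region, so the
 * whole SP_MIN image must fit between BL32_BASE and BL32_LIMIT (both
 * platform-defined). The final ASSERT at the end of SECTIONS enforces this
 * at link time.
 */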
17
Heiko Stuebner95ba3552019-04-11 15:26:07 +020018#ifdef PLAT_SP_MIN_EXTRA_LD_SCRIPT
Chris Kay4b7660a2022-09-29 14:36:53 +010019# include <plat_sp_min.ld.S>
20#endif /* PLAT_SP_MIN_EXTRA_LD_SCRIPT */
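
/*
 * The optional platform fragment is included at file scope, before SECTIONS,
 * so a platform can contribute extra linker definitions here. A minimal
 * sketch of what such a file might contain (hypothetical, names are
 * illustrative only):
 *
 *   MEMORY {
 *       PLAT_SRAM (rw): ORIGIN = PLAT_SRAM_BASE, LENGTH = PLAT_SRAM_SIZE
 *   }
 */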

SECTIONS {
    . = BL32_BASE;

    ASSERT(. == ALIGN(PAGE_SIZE),
           "BL32_BASE address is not aligned on a page boundary.")
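
    /*
     * Page alignment of BL32_BASE matters because the translation tables map
     * the image with per-page attributes; an unaligned base would prevent
     * code, read-only data and read-write data from getting their intended
     * permissions.
     */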

#if SEPARATE_CODE_AND_RODATA
    .text . : {
        __TEXT_START__ = .;

        *entrypoint.o(.text*)
        *(SORT_BY_ALIGNMENT(.text*))
        *(.vectors)

        . = ALIGN(PAGE_SIZE);

        __TEXT_END__ = .;
    } >RAM
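
    /*
     * Note the ordering above: the entrypoint object's code is emitted first
     * so that the entry code sits at the start of the image (BL32_BASE),
     * followed by the remaining code (sorted by alignment to reduce padding)
     * and the exception vectors.
     */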

    /* .ARM.extab and .ARM.exidx are only added because Clang needs them */
    .ARM.extab . : {
        *(.ARM.extab* .gnu.linkonce.armextab.*)
    } >RAM

    .ARM.exidx . : {
        *(.ARM.exidx* .gnu.linkonce.armexidx.*)
    } >RAM

    .rodata . : {
        __RODATA_START__ = .;
        *(SORT_BY_ALIGNMENT(.rodata*))

        RODATA_COMMON

        . = ALIGN(8);

#   include <lib/el3_runtime/pubsub_events.h>

        . = ALIGN(PAGE_SIZE);

        __RODATA_END__ = .;
    } >RAM
#else /* SEPARATE_CODE_AND_RODATA */
    .ro . : {
        __RO_START__ = .;

        *entrypoint.o(.text*)
        *(SORT_BY_ALIGNMENT(.text*))
        *(SORT_BY_ALIGNMENT(.rodata*))

        RODATA_COMMON

        . = ALIGN(8);

#   include <lib/el3_runtime/pubsub_events.h>

        *(.vectors)

        __RO_END_UNALIGNED__ = .;

        /*
         * Memory page(s) mapped to this section will be marked as read-only,
         * executable. No RW data from the next section must creep in. Ensure
         * that the rest of the current memory page is unused.
         */
        . = ALIGN(PAGE_SIZE);

        __RO_END__ = .;
    } >RAM
#endif /* SEPARATE_CODE_AND_RODATA */
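
    /*
     * RODATA_COMMON (defined in common/bl_common.ld.h) gathers the read-only
     * data sections shared by all BL images, including the cpu_ops
     * descriptors whose presence is checked immediately below.
     */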

    ASSERT(__CPU_OPS_END__ > __CPU_OPS_START__,
           "cpu_ops not defined for this platform.")

    __RW_START__ = .;

    DATA_SECTION >RAM
    RELA_SECTION >RAM
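
    /*
     * DATA_SECTION and RELA_SECTION are helper macros from
     * common/bl_common.ld.h: the former lays out the initialised .data
     * section, the latter keeps the .rela.dyn entries (bounded by
     * __RELA_START__/__RELA_END__) that are applied at runtime when the
     * image is built as position independent.
     */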

#ifdef BL32_PROGBITS_LIMIT
    ASSERT(. <= BL32_PROGBITS_LIMIT, "BL32 progbits has exceeded its limit.")
#endif /* BL32_PROGBITS_LIMIT */

    STACK_SECTION >RAM
    BSS_SECTION >RAM
    XLAT_TABLE_SECTION >RAM
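
    /*
     * STACK_SECTION, BSS_SECTION and XLAT_TABLE_SECTION (also from
     * common/bl_common.ld.h) allocate the CPU stacks, the zero-initialised
     * .bss and the translation tables as NOLOAD sections: they occupy RAM at
     * run time but take no space in the loaded image.
     */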

    __BSS_SIZE__ = SIZEOF(.bss);
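
    /*
     * __BSS_SIZE__ is typically consumed by the early boot code, which
     * zeroes the .bss region (e.g. via zeromem) before any C code runs.
     */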

#if USE_COHERENT_MEM
    /*
     * The base address of the coherent memory section must be page-aligned to
     * guarantee that the coherent data are stored on their own pages and are
     * not mixed with normal data. This is required to set up the correct
     * memory attributes for the coherent data page tables.
     */
    .coherent_ram (NOLOAD) : ALIGN(PAGE_SIZE) {
        __COHERENT_RAM_START__ = .;

        /*
         * Bakery locks are stored in coherent memory. Each lock's data is
         * contiguous and fully allocated by the compiler.
         */
        *(.bakery_lock)
        *(.tzfw_coherent_mem)

        __COHERENT_RAM_END_UNALIGNED__ = .;

        /*
         * Memory page(s) mapped to this section will be marked as device
         * memory. No other unexpected data must creep in. Ensure that the rest
         * of the current memory page is unused.
         */
        . = ALIGN(PAGE_SIZE);

        __COHERENT_RAM_END__ = .;
    } >RAM

    __COHERENT_RAM_UNALIGNED_SIZE__ =
        __COHERENT_RAM_END_UNALIGNED__ - __COHERENT_RAM_START__;
#endif /* USE_COHERENT_MEM */

    __RW_END__ = .;
    __BL32_END__ = .;
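
    /*
     * __RW_START__/__RW_END__ bound the read-write footprint of the image
     * and __BL32_END__ marks its overall end. C code would normally import
     * such symbols with the IMPORT_SYM helper, for example (illustrative
     * only):
     *
     *   IMPORT_SYM(uintptr_t, __BL32_END__, BL32_END);
     */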

    /DISCARD/ : {
        *(.dynsym .dynstr .hash .gnu.hash)
    }
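
    /*
     * The dynamic symbol and hash tables can be emitted when the image is
     * linked as position independent; they are not used at run time, so they
     * are discarded to keep them out of the memory map.
     */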

    ASSERT(. <= BL32_LIMIT, "BL32 image has exceeded its limit.")
}