/*
 * Copyright (c) 2013-2023, Arm Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */
6
/* Shared linker-script helpers (DATA_SECTION, BSS_SECTION, ...) and page-size
 * definitions used by the ALIGN(PAGE_SIZE) directives below. */
#include <common/bl_common.ld.h>
#include <lib/xlat_tables/xlat_tables_defs.h>

/* Output format/arch are platform-provided macros; the image entry point is
 * the TSP entry routine defined in tsp_entrypoint.S. */
OUTPUT_FORMAT(PLATFORM_LINKER_FORMAT)
OUTPUT_ARCH(PLATFORM_LINKER_ARCH)
ENTRY(tsp_entrypoint)
13
/*
 * Single RAM region covering the whole of the TSP's secure memory; every
 * output section below is placed into it (>RAM).
 */
MEMORY {
    RAM (rwx): ORIGIN = TSP_SEC_MEM_BASE, LENGTH = TSP_SEC_MEM_SIZE
}
17
SECTIONS {
    /* Exported markers for the usable RAM region. */
    RAM_REGION_START = ORIGIN(RAM);
    RAM_REGION_LENGTH = LENGTH(RAM);
    . = BL32_BASE;

    /* The MMU maps this image with page granularity, so the load address
     * itself must sit on a page boundary. */
    ASSERT(. == ALIGN(PAGE_SIZE),
        "BL32_BASE address is not aligned on a page boundary.")

#if SEPARATE_CODE_AND_RODATA
    /* Code and read-only data live in separate pages so they can be mapped
     * executable/read-only and non-executable/read-only respectively. */
    .text . : {
        __TEXT_START__ = .;

        /* Keep the entry point at the very start of the image. */
        *tsp_entrypoint.o(.text*)
        *(.text*)
        *(.vectors)
        __TEXT_END_UNALIGNED__ = .;

        . = ALIGN(PAGE_SIZE);

        __TEXT_END__ = .;
    } >RAM

    .rodata . : {
        __RODATA_START__ = .;

        *(.rodata*)

        RODATA_COMMON

        __RODATA_END_UNALIGNED__ = .;
        . = ALIGN(PAGE_SIZE);

        __RODATA_END__ = .;
    } >RAM
#else /* SEPARATE_CODE_AND_RODATA */
    /* Combined read-only section: code, rodata and vectors share pages. */
    .ro . : {
        __RO_START__ = .;

        /* Keep the entry point at the very start of the image. */
        *tsp_entrypoint.o(.text*)
        *(.text*)
        *(.rodata*)

        RODATA_COMMON

        *(.vectors)

        __RO_END_UNALIGNED__ = .;

        /*
         * Memory page(s) mapped to this section will be marked as read-only,
         * executable. No RW data from the next section must creep in. Ensure
         * that the rest of the current memory page is unused.
         */
        . = ALIGN(PAGE_SIZE);

        __RO_END__ = .;
    } >RAM
#endif /* SEPARATE_CODE_AND_RODATA */

    __RW_START__ = .;

    DATA_SECTION >RAM
    RELA_SECTION >RAM

#ifdef TSP_PROGBITS_LIMIT
    /* Optional platform cap on the size of sections with file contents. */
    ASSERT(. <= TSP_PROGBITS_LIMIT, "TSP progbits has exceeded its limit.")
#endif /* TSP_PROGBITS_LIMIT */

    STACK_SECTION >RAM
    BSS_SECTION >RAM
    XLAT_TABLE_SECTION >RAM

#if USE_COHERENT_MEM
    /*
     * The base address of the coherent memory section must be page-aligned to
     * guarantee that the coherent data are stored on their own pages and are
     * not mixed with normal data. This is required to set up the correct memory
     * attributes for the coherent data page tables.
     */
    .coherent_ram (NOLOAD) : ALIGN(PAGE_SIZE) {
        __COHERENT_RAM_START__ = .;
        *(.tzfw_coherent_mem)
        __COHERENT_RAM_END_UNALIGNED__ = .;

        /*
         * Memory page(s) mapped to this section will be marked as device
         * memory. No other unexpected data must creep in. Ensure that the rest
         * of the current memory page is unused.
         */
        . = ALIGN(PAGE_SIZE);

        __COHERENT_RAM_END__ = .;
    } >RAM
#endif /* USE_COHERENT_MEM */

    __RW_END__ = .;
    __BL32_END__ = .;

    /* Dynamic-linking metadata is never used at runtime; drop it. */
    /DISCARD/ : {
        *(.dynsym .dynstr .hash .gnu.hash)
    }

    __BSS_SIZE__ = SIZEOF(.bss);

#if USE_COHERENT_MEM
    __COHERENT_RAM_UNALIGNED_SIZE__ =
        __COHERENT_RAM_END_UNALIGNED__ - __COHERENT_RAM_START__;
#endif /* USE_COHERENT_MEM */

    ASSERT(. <= BL32_LIMIT, "BL32 image has exceeded its limit.")
    RAM_REGION_END = .;
}
Achin Gupta7c88f3f2014-02-18 18:09:12 +0000129}