/*
 * Copyright (c) 2013-2023, Arm Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */
6
/* Common section/macro definitions (DATA_SECTION, BSS_SECTION, ...) shared by all BL images. */
#include <common/bl_common.ld.h>
/* Provides PAGE_SIZE used for the alignment/ASSERT directives below. */
#include <lib/xlat_tables/xlat_tables_defs.h>

/* Output format/arch are platform-provided macros (e.g. elf64-littleaarch64 / aarch64). */
OUTPUT_FORMAT(PLATFORM_LINKER_FORMAT)
OUTPUT_ARCH(PLATFORM_LINKER_ARCH)
/* The TSP image is entered at tsp_entrypoint (defined in tsp_entrypoint.o, see SECTIONS). */
ENTRY(tsp_entrypoint)

/*
 * Single memory region: all TSP sections (code, data, BSS, page tables,
 * coherent memory) are placed in secure RAM. Base/size come from the platform.
 */
MEMORY {
    RAM (rwx): ORIGIN = TSP_SEC_MEM_BASE, LENGTH = TSP_SEC_MEM_SIZE
}
17
SECTIONS {
    /* Exported markers for the usable RAM region (consumed elsewhere; not used below). */
    RAM_REGION_START = ORIGIN(RAM);
    RAM_REGION_LENGTH = LENGTH(RAM);

    /* Start laying out the image at the platform-defined BL32 load address. */
    . = BL32_BASE;

    /*
     * BL32_BASE must be page-aligned so that the MMU attributes applied to the
     * first section (RX or RO) cover whole pages.
     */
    ASSERT(. == ALIGN(PAGE_SIZE),
           "BL32_BASE address is not aligned on a page boundary.")

#if SEPARATE_CODE_AND_RODATA
    /* Code and read-only data on separate pages, so code can be mapped RX and
     * rodata RO-NX. Each section is padded to a page boundary. */
    .text . : {
        __TEXT_START__ = .;

        /* The entry point object is placed first so tsp_entrypoint sits at BL32_BASE. */
        *tsp_entrypoint.o(.text*)
        *(.text*)
        *(.vectors)

        /* Pad to a page boundary so no rodata shares the last code page. */
        . = ALIGN(PAGE_SIZE);

        __TEXT_END__ = .;
    } >RAM

    .rodata . : {
        __RODATA_START__ = .;

        *(.rodata*)

        /* Common read-only structures (macro from bl_common.ld.h). */
        RODATA_COMMON

        . = ALIGN(PAGE_SIZE);

        __RODATA_END__ = .;
    } >RAM
#else /* SEPARATE_CODE_AND_RODATA */
    /* Combined code + rodata section (mapped read-only, executable). */
    .ro . : {
        __RO_START__ = .;

        /* Entry point object first so tsp_entrypoint sits at BL32_BASE. */
        *tsp_entrypoint.o(.text*)
        *(.text*)
        *(.rodata*)

        /* Common read-only structures (macro from bl_common.ld.h). */
        RODATA_COMMON

        *(.vectors)

        __RO_END_UNALIGNED__ = .;

        /*
         * Memory page(s) mapped to this section will be marked as read-only,
         * executable. No RW data from the next section must creep in. Ensure
         * that the rest of the current memory page is unused.
         */
        . = ALIGN(PAGE_SIZE);

        __RO_END__ = .;
    } >RAM
#endif /* SEPARATE_CODE_AND_RODATA */

    /* Everything from here to __RW_END__ is mapped read-write. */
    __RW_START__ = .;

    /* .data and (for PIE builds) relocation sections — macros from bl_common.ld.h. */
    DATA_SECTION >RAM
    RELA_SECTION >RAM

#ifdef TSP_PROGBITS_LIMIT
    /* Optional platform cap on the loadable (progbits) portion of the image. */
    ASSERT(. <= TSP_PROGBITS_LIMIT, "TSP progbits has exceeded its limit.")
#endif /* TSP_PROGBITS_LIMIT */

    /* NOLOAD regions: stacks, zero-initialised data and translation tables. */
    STACK_SECTION >RAM
    BSS_SECTION >RAM
    XLAT_TABLE_SECTION >RAM

#if USE_COHERENT_MEM
    /*
     * The base address of the coherent memory section must be page-aligned to
     * guarantee that the coherent data are stored on their own pages and are
     * not mixed with normal data. This is required to set up the correct memory
     * attributes for the coherent data page tables.
     */
    .coherent_ram (NOLOAD) : ALIGN(PAGE_SIZE) {
        __COHERENT_RAM_START__ = .;
        *(.tzfw_coherent_mem)
        __COHERENT_RAM_END_UNALIGNED__ = .;

        /*
         * Memory page(s) mapped to this section will be marked as device
         * memory. No other unexpected data must creep in. Ensure that the rest
         * of the current memory page is unused.
         */
        . = ALIGN(PAGE_SIZE);

        __COHERENT_RAM_END__ = .;
    } >RAM
#endif /* USE_COHERENT_MEM */

    /* End of the read-write mapping and of the whole BL32 image footprint. */
    __RW_END__ = .;
    __BL32_END__ = .;

    /* Dynamic-linking metadata is not needed at runtime; drop it from the image. */
    /DISCARD/ : {
        *(.dynsym .dynstr .hash .gnu.hash)
    }

    /* Size symbols consumed by the runtime init code (e.g. for zeroing BSS). */
    __BSS_SIZE__ = SIZEOF(.bss);

#if USE_COHERENT_MEM
    __COHERENT_RAM_UNALIGNED_SIZE__ =
        __COHERENT_RAM_END_UNALIGNED__ - __COHERENT_RAM_START__;
#endif /* USE_COHERENT_MEM */

    /* The full image (including NOLOAD sections) must fit below the platform limit. */
    ASSERT(. <= BL32_LIMIT, "BL32 image has exceeded its limit.")
    RAM_REGION_END = .;
}