/*
 * Copyright (c) 2013-2023, Arm Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

/* Shared section macros (DATA_SECTION, BSS_SECTION, ...) and xlat-table sizes. */
#include <common/bl_common.ld.h>
#include <lib/xlat_tables/xlat_tables_defs.h>

/* Output format/arch come from the platform makefiles via the preprocessor. */
OUTPUT_FORMAT(PLATFORM_LINKER_FORMAT)
OUTPUT_ARCH(PLATFORM_LINKER_ARCH)

/* Image execution starts at the TSP entry point. */
ENTRY(tsp_entrypoint)
13
/*
 * The whole TSP image lives in a single secure RAM region whose base and
 * size are provided by the platform (TSP_SEC_MEM_BASE/TSP_SEC_MEM_SIZE).
 */
MEMORY {
    RAM (rwx): ORIGIN = TSP_SEC_MEM_BASE, LENGTH = TSP_SEC_MEM_SIZE
}
17
/*
 * BL32 (TSP) image layout. Output order is significant: read-only content
 * first (optionally split into .text/.rodata for finer-grained memory
 * attributes), then RW data, stacks, BSS and translation tables, and
 * finally the optional device-attribute coherent-memory page(s).
 */
SECTIONS {
    . = BL32_BASE;

    ASSERT(. == ALIGN(PAGE_SIZE),
        "BL32_BASE address is not aligned on a page boundary.")

#if SEPARATE_CODE_AND_RODATA
    .text . : {
        __TEXT_START__ = .;

        /* The entry point object must come first in the image. */
        *tsp_entrypoint.o(.text*)
        *(.text*)
        *(.vectors)

        /* Page-align so .text can be mapped executable-only. */
        . = ALIGN(PAGE_SIZE);

        __TEXT_END__ = .;
    } >RAM

    .rodata . : {
        __RODATA_START__ = .;

        *(.rodata*)

        RODATA_COMMON

        /* Page-align so .rodata can be mapped read-only, non-executable. */
        . = ALIGN(PAGE_SIZE);

        __RODATA_END__ = .;
    } >RAM
#else /* SEPARATE_CODE_AND_RODATA */
    .ro . : {
        __RO_START__ = .;

        /* The entry point object must come first in the image. */
        *tsp_entrypoint.o(.text*)
        *(.text*)
        *(.rodata*)

        RODATA_COMMON

        *(.vectors)

        __RO_END_UNALIGNED__ = .;

        /*
         * Memory page(s) mapped to this section will be marked as read-only,
         * executable. No RW data from the next section must creep in. Ensure
         * that the rest of the current memory page is unused.
         */
        . = ALIGN(PAGE_SIZE);

        __RO_END__ = .;
    } >RAM
#endif /* SEPARATE_CODE_AND_RODATA */

    __RW_START__ = .;

    DATA_SECTION >RAM
    RELA_SECTION >RAM

#ifdef TSP_PROGBITS_LIMIT
    ASSERT(. <= TSP_PROGBITS_LIMIT, "TSP progbits has exceeded its limit.")
#endif /* TSP_PROGBITS_LIMIT */

    STACK_SECTION >RAM
    BSS_SECTION >RAM
    XLAT_TABLE_SECTION >RAM

#if USE_COHERENT_MEM
    /*
     * The base address of the coherent memory section must be page-aligned to
     * guarantee that the coherent data are stored on their own pages and are
     * not mixed with normal data. This is required to set up the correct memory
     * attributes for the coherent data page tables.
     */
    .coherent_ram (NOLOAD) : ALIGN(PAGE_SIZE) {
        __COHERENT_RAM_START__ = .;
        *(.tzfw_coherent_mem)
        __COHERENT_RAM_END_UNALIGNED__ = .;

        /*
         * Memory page(s) mapped to this section will be marked as device
         * memory. No other unexpected data must creep in. Ensure that the rest
         * of the current memory page is unused.
         */
        . = ALIGN(PAGE_SIZE);

        __COHERENT_RAM_END__ = .;
    } >RAM
#endif /* USE_COHERENT_MEM */

    __RW_END__ = .;
    __BL32_END__ = .;

    /* Dynamic-linking metadata is never used at runtime; drop it. */
    /DISCARD/ : {
        *(.dynsym .dynstr .hash .gnu.hash)
    }

    __BSS_SIZE__ = SIZEOF(.bss);

#if USE_COHERENT_MEM
    __COHERENT_RAM_UNALIGNED_SIZE__ =
        __COHERENT_RAM_END_UNALIGNED__ - __COHERENT_RAM_START__;
#endif /* USE_COHERENT_MEM */

    ASSERT(. <= BL32_LIMIT, "BL32 image has exceeded its limit.")
}