/*
 * Copyright (c) 2013-2020, ARM Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <common/bl_common.ld.h>
#include <lib/xlat_tables/xlat_tables_defs.h>

OUTPUT_FORMAT(PLATFORM_LINKER_FORMAT)
OUTPUT_ARCH(PLATFORM_LINKER_ARCH)
ENTRY(tsp_entrypoint)

/* The whole TSP image lives in a single secure-RAM region. */
MEMORY {
	RAM (rwx): ORIGIN = TSP_SEC_MEM_BASE, LENGTH = TSP_SEC_MEM_SIZE
}

SECTIONS
{
	. = BL32_BASE;
	ASSERT(. == ALIGN(PAGE_SIZE),
	       "BL32_BASE address is not aligned on a page boundary.")

#if SEPARATE_CODE_AND_RODATA
	/*
	 * Code and read-only data are kept in separate, page-aligned
	 * sections so they can be mapped with distinct memory attributes
	 * (executable vs. non-executable).
	 */
	.text . : {
		__TEXT_START__ = .;
		/*
		 * The entrypoint object is placed first — NOTE(review):
		 * presumably so the tsp_entrypoint code sits at BL32_BASE;
		 * confirm against the platform's entry requirements.
		 */
		*tsp_entrypoint.o(.text*)
		*(.text*)
		*(.vectors)
		. = ALIGN(PAGE_SIZE);
		__TEXT_END__ = .;
	} >RAM

	.rodata . : {
		__RODATA_START__ = .;
		*(.rodata*)

		RODATA_COMMON

		. = ALIGN(PAGE_SIZE);
		__RODATA_END__ = .;
	} >RAM
#else
	/* Combined read-only section: code, rodata and vectors together. */
	ro . : {
		__RO_START__ = .;
		*tsp_entrypoint.o(.text*)
		*(.text*)
		*(.rodata*)

		RODATA_COMMON

		*(.vectors)

		__RO_END_UNALIGNED__ = .;
		/*
		 * Memory page(s) mapped to this section will be marked as
		 * read-only, executable. No RW data from the next section must
		 * creep in. Ensure the rest of the current memory page is unused.
		 */
		. = ALIGN(PAGE_SIZE);
		__RO_END__ = .;
	} >RAM
#endif

	/*
	 * Define a linker symbol to mark start of the RW memory area for this
	 * image.
	 */
	__RW_START__ = . ;

	DATA_SECTION >RAM

	/*
	 * .rela.dyn needs to come after .data for the read-elf utility to parse
	 * this section correctly. Ensure 8-byte alignment so that the fields of
	 * RELA data structure are aligned.
	 */
	. = ALIGN(8);
	__RELA_START__ = .;
	.rela.dyn . : {
	} >RAM
	__RELA_END__ = .;

#ifdef TSP_PROGBITS_LIMIT
	ASSERT(. <= TSP_PROGBITS_LIMIT, "TSP progbits has exceeded its limit.")
#endif

	STACK_SECTION >RAM
	BSS_SECTION >RAM
	XLAT_TABLE_SECTION >RAM

#if USE_COHERENT_MEM
	/*
	 * The base address of the coherent memory section must be page-aligned (4K)
	 * to guarantee that the coherent data are stored on their own pages and
	 * are not mixed with normal data. This is required to set up the correct
	 * memory attributes for the coherent data page tables.
	 */
	coherent_ram (NOLOAD) : ALIGN(PAGE_SIZE) {
		__COHERENT_RAM_START__ = .;
		*(tzfw_coherent_mem)
		__COHERENT_RAM_END_UNALIGNED__ = .;
		/*
		 * Memory page(s) mapped to this section will be marked
		 * as device memory. No other unexpected data must creep in.
		 * Ensure the rest of the current memory page is unused.
		 */
		. = ALIGN(PAGE_SIZE);
		__COHERENT_RAM_END__ = .;
	} >RAM
#endif

	/*
	 * Define a linker symbol to mark the end of the RW memory area for this
	 * image.
	 */
	__RW_END__ = .;
	__BL32_END__ = .;

	/* Dynamic-linking metadata is not needed in the final image. */
	/DISCARD/ : {
		*(.dynsym .dynstr .hash .gnu.hash)
	}

	__BSS_SIZE__ = SIZEOF(.bss);
#if USE_COHERENT_MEM
	__COHERENT_RAM_UNALIGNED_SIZE__ =
		__COHERENT_RAM_END_UNALIGNED__ - __COHERENT_RAM_START__;
#endif

	ASSERT(. <= BL32_LIMIT, "BL32 image has exceeded its limit.")
}
Achin Gupta7c88f3f2014-02-18 18:09:12 +0000133}