/*
 * Copyright (C) 2018 Marvell International Ltd.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 * https://spdx.org/licenses
 */

#include <assert.h>

#include <platform_def.h>

#include <arch.h>
#include <arch_helpers.h>
#include <common/debug.h>
#include <lib/mmio.h>
#include <lib/xlat_tables/xlat_tables.h>

#include <plat_marvell.h>

/* Weak definitions may be overridden by a specific Marvell platform */
#pragma weak plat_get_ns_image_entrypoint
#pragma weak plat_marvell_get_mmap

/*
 * Set up the page tables for the generic and platform-specific memory regions.
 * The extents of the generic memory regions are specified by the function
 * arguments and consist of:
 * - Trusted SRAM seen by the BL image;
 * - Code section;
 * - Read-only data section;
 * - Coherent memory region, if applicable.
 */
void marvell_setup_page_tables(uintptr_t total_base,
			       size_t total_size,
			       uintptr_t code_start,
			       uintptr_t code_limit,
			       uintptr_t rodata_start,
			       uintptr_t rodata_limit
#if USE_COHERENT_MEM
			       ,
			       uintptr_t coh_start,
			       uintptr_t coh_limit
#endif
			       )
{
	/*
	 * Map the Trusted SRAM with appropriate memory attributes.
	 * Subsequent mappings will adjust the attributes for specific regions.
	 */
	VERBOSE("Trusted SRAM seen by this BL image: %p - %p\n",
		(void *) total_base, (void *) (total_base + total_size));
	mmap_add_region(total_base, total_base,
			total_size,
			MT_MEMORY | MT_RW | MT_SECURE);

	/* Re-map the code section */
	VERBOSE("Code region: %p - %p\n",
		(void *) code_start, (void *) code_limit);
	mmap_add_region(code_start, code_start,
			code_limit - code_start,
			MT_CODE | MT_SECURE);

	/* Re-map the read-only data section */
	VERBOSE("Read-only data region: %p - %p\n",
		(void *) rodata_start, (void *) rodata_limit);
	mmap_add_region(rodata_start, rodata_start,
			rodata_limit - rodata_start,
			MT_RO_DATA | MT_SECURE);

#if USE_COHERENT_MEM
	/* Re-map the coherent memory region */
	VERBOSE("Coherent region: %p - %p\n",
		(void *) coh_start, (void *) coh_limit);
	mmap_add_region(coh_start, coh_start,
			coh_limit - coh_start,
			MT_DEVICE | MT_RW | MT_SECURE);
#endif

	/* Now (re-)map the platform-specific memory regions */
	mmap_add(plat_marvell_get_mmap());

	/* Create the page tables to reflect the above mappings */
	init_xlat_tables();
}
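
/*
 * Usage sketch (illustrative, not part of this file): a BL image's platform
 * setup code is expected to pass its own linker-defined extents, roughly:
 *
 *	marvell_setup_page_tables(BL31_BASE, BL31_END - BL31_BASE,
 *				  BL_CODE_BASE, BL_CODE_END,
 *				  BL_RO_DATA_BASE, BL_RO_DATA_END
 *#if USE_COHERENT_MEM
 *				  , BL_COHERENT_RAM_BASE, BL_COHERENT_RAM_END
 *#endif
 *				  );
 *	enable_mmu_el3(0);
 *
 * The BL31_ and BL_ names are the generic TF-A linker-script symbols; the
 * exact caller and flags are assumptions, not taken from this file.
 */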
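
/*****************************************************************************
 * Returns the entry point address of the non-secure (BL33) image
 * (weak default, may be overridden by the platform)
 *****************************************************************************
 */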
unsigned long plat_get_ns_image_entrypoint(void)
{
	return PLAT_MARVELL_NS_IMAGE_OFFSET;
}

/*****************************************************************************
 * Gets SPSR for BL32 entry
 *****************************************************************************
 */
uint32_t marvell_get_spsr_for_bl32_entry(void)
{
	/*
	 * The Secure Payload Dispatcher service is responsible for
	 * setting the SPSR prior to entry into the BL32 image.
	 */
	return 0;
}

/*****************************************************************************
 * Gets SPSR for BL33 entry
 *****************************************************************************
 */
uint32_t marvell_get_spsr_for_bl33_entry(void)
{
	unsigned long el_status;
	unsigned int mode;
	uint32_t spsr;

	/* Figure out what mode we enter the non-secure world in */
	el_status = read_id_aa64pfr0_el1() >> ID_AA64PFR0_EL2_SHIFT;
	el_status &= ID_AA64PFR0_ELX_MASK;
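
	/* Enter the non-secure world at EL2 if it is implemented, at EL1 otherwise */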
	mode = (el_status) ? MODE_EL2 : MODE_EL1;

	/*
	 * TODO: Consider the possibility of specifying the SPSR in
	 * the FIP ToC and allowing the platform to have a say as
	 * well.
	 */
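	/* AArch64 state at the selected EL, SP_ELx stack, D/A/I/F exceptions masked */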
	spsr = SPSR_64(mode, MODE_SP_ELX, DISABLE_ALL_EXCEPTIONS);
	return spsr;
}

/*****************************************************************************
 * Returns Marvell platform specific memory map regions.
 *****************************************************************************
 */
const mmap_region_t *plat_marvell_get_mmap(void)
{
	return plat_marvell_mmap;
}
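
/*
 * For reference, plat_marvell_mmap is a zero-terminated mmap_region_t array
 * supplied by the specific platform. A minimal sketch, assuming hypothetical
 * DEVICE_BASE and DEVICE_SIZE values (not taken from this file):
 *
 *	const mmap_region_t plat_marvell_mmap[] = {
 *		MARVELL_MAP_SHARED_RAM,
 *		MAP_REGION_FLAT(DEVICE_BASE, DEVICE_SIZE,
 *				MT_DEVICE | MT_RW | MT_SECURE),
 *		{0}
 *	};
 */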