/*
 * Copyright (c) 2015-2017, ARM Limited and Contributors. All rights reserved.
 * Copyright (c) 2019-2020, NVIDIA Corporation. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <assert.h>
#include <string.h>

#include <arch_helpers.h>
#include <common/bl_common.h>
#include <common/debug.h>
#include <lib/mmio.h>
#include <lib/utils.h>
#include <lib/xlat_tables/xlat_tables_v2.h>

#include <mce.h>
#include <memctrl.h>
#include <memctrl_v2.h>
#include <smmu.h>
#include <tegra_def.h>
#include <tegra_platform.h>
#include <tegra_private.h>

/* Video Memory base and size (live values) */
static uint64_t video_mem_base;
static uint64_t video_mem_size_mb;
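/*
 * Note: the size is tracked in MB, matching the granularity of the
 * MC_VIDEO_PROTECT_SIZE_MB register programmed below.
 */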

/*
 * Initialize the Memory Controller during boot.
 */
void tegra_memctrl_setup(void)
{
	INFO("Tegra Memory Controller (v2)\n");

	/* Initialize the System memory management unit */
	tegra_smmu_init();

	/* allow platforms to program custom memory controller settings */
	plat_memctrl_setup();

	/*
	 * All requests at boot time, and certain requests during
	 * normal run time, are physically addressed and must bypass
	 * the SMMU. The client hub logic implements a hardware bypass
	 * path around the Translation Buffer Units (TBU). During
	 * boot-time, the SMMU_BYPASS_CTRL register (which defaults to
	 * TBU_BYPASS mode) will be used to steer all requests around
	 * the uninitialized TBUs. During normal operation, this register
	 * is locked into TBU_BYPASS_SID config, which routes requests
	 * with special StreamID 0x7f on the bypass path and all others
	 * through the selected TBU. This is done to disable SMMU Bypass
	 * mode, as it could be used to circumvent SMMU security checks.
	 */
	tegra_mc_write_32(MC_SMMU_BYPASS_CONFIG,
			  MC_SMMU_BYPASS_CONFIG_SETTINGS);
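
	/*
	 * Note: the value written above is SoC-specific; it is assumed to be
	 * provided by the memctrl_v2.h header included at the top of this
	 * file and to select the TBU_BYPASS_SID configuration described in
	 * the preceding comment.
	 */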
}

/*
 * Restore Memory Controller settings after "System Suspend"
 */
void tegra_memctrl_restore_settings(void)
{
	/* restore platform's memory controller settings */
	plat_memctrl_restore();

	/* video memory carveout region */
	if (video_mem_base != 0ULL) {
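		/*
		 * Reprogram the previously configured VideoMem carveout and
		 * read back each register to confirm that the writes took
		 * effect.
		 */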
		tegra_mc_write_32(MC_VIDEO_PROTECT_BASE_LO,
				  (uint32_t)video_mem_base);
		assert(tegra_mc_read_32(MC_VIDEO_PROTECT_BASE_LO)
		       == (uint32_t)video_mem_base);
		tegra_mc_write_32(MC_VIDEO_PROTECT_BASE_HI,
				  (uint32_t)(video_mem_base >> 32));
		assert(tegra_mc_read_32(MC_VIDEO_PROTECT_BASE_HI)
		       == (uint32_t)(video_mem_base >> 32));
		tegra_mc_write_32(MC_VIDEO_PROTECT_SIZE_MB,
				  (uint32_t)video_mem_size_mb);
		assert(tegra_mc_read_32(MC_VIDEO_PROTECT_SIZE_MB)
		       == (uint32_t)video_mem_size_mb);

		/*
		 * MCE propagates the VideoMem configuration values across the
		 * CCPLEX.
		 */
		mce_update_gsc_videomem();
	}
}

/*
 * Secure the BL31 DRAM aperture.
 *
 * phys_base = physical base of TZDRAM aperture
 * size_in_bytes = size of aperture in bytes
 */
void tegra_memctrl_tzdram_setup(uint64_t phys_base, uint32_t size_in_bytes)
{
	/*
	 * Perform platform specific steps.
	 */
	plat_memctrl_tzdram_setup(phys_base, size_in_bytes);
}

/*
 * Secure the BL31 TZRAM aperture.
 *
 * phys_base = physical base of TZRAM aperture
 * size_in_bytes = size of aperture in bytes
 */
void tegra_memctrl_tzram_setup(uint64_t phys_base, uint32_t size_in_bytes)
{
	; /* do nothing */
}

/*
 * Save MC settings before "System Suspend" to TZDRAM
 */
void tegra_mc_save_context(uint64_t mc_ctx_addr)
{
	uint32_t i, num_entries = 0;
	mc_regs_t *mc_ctx_regs;
	const plat_params_from_bl2_t *params_from_bl2 = bl31_get_plat_params();
	uint64_t tzdram_base = params_from_bl2->tzdram_base;
	uint64_t tzdram_end = tzdram_base + params_from_bl2->tzdram_size;

	assert((mc_ctx_addr >= tzdram_base) && (mc_ctx_addr <= tzdram_end));

	/* get MC context table */
	mc_ctx_regs = plat_memctrl_get_sys_suspend_ctx();
	assert(mc_ctx_regs != NULL);

	/*
	 * mc_ctx_regs[0].val contains the size of the context table minus
	 * the last entry. Sanity check the table size before we start with
	 * the context save operation.
	 */
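	/*
	 * Expected table layout, as implied by the code below:
	 *   [0]       header entry; .val holds N, the number of entries
	 *             preceding the terminator (header included)
	 *   [1..N-1]  .reg = MC register offset, .val = saved register value
	 *   [N]       terminator entry with .reg = 0xFFFFFFFF, copied as-is
	 */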
	while (mc_ctx_regs[num_entries].reg != 0xFFFFFFFFU) {
		num_entries++;
	}

	/* panic if the sizes do not match */
	if (num_entries != mc_ctx_regs[0].val) {
		ERROR("MC context size mismatch!\n");
		panic();
	}

	/* save MC register values */
	for (i = 1U; i < num_entries; i++) {
		mc_ctx_regs[i].val = mmio_read_32(mc_ctx_regs[i].reg);
	}

	/* increment by 1 to take care of the last entry */
	num_entries++;

	/* Save MC config settings */
	(void)memcpy((void *)mc_ctx_addr, mc_ctx_regs,
		     sizeof(mc_regs_t) * num_entries);

	/* save the MC table address */
	mmio_write_32(TEGRA_SCRATCH_BASE + SCRATCH_MC_TABLE_ADDR_LO,
		      (uint32_t)mc_ctx_addr);
	assert(mmio_read_32(TEGRA_SCRATCH_BASE + SCRATCH_MC_TABLE_ADDR_LO)
	       == (uint32_t)mc_ctx_addr);
	mmio_write_32(TEGRA_SCRATCH_BASE + SCRATCH_MC_TABLE_ADDR_HI,
		      (uint32_t)(mc_ctx_addr >> 32));
	assert(mmio_read_32(TEGRA_SCRATCH_BASE + SCRATCH_MC_TABLE_ADDR_HI)
	       == (uint32_t)(mc_ctx_addr >> 32));
}

static void tegra_lock_videomem_nonoverlap(uint64_t phys_base,
					   uint64_t size_in_bytes)
{
	uint32_t index;
	uint64_t total_128kb_blocks = size_in_bytes >> 17;
	uint64_t residual_4kb_blocks = (size_in_bytes & (uint32_t)0x1FFFF) >> 12;
	uint64_t val;

	/*
	 * Reset the access configuration registers to restrict access to
	 * old Videomem aperture
	 */
	for (index = MC_VIDEO_PROTECT_CLEAR_ACCESS_CFG0;
	     index < ((uint32_t)MC_VIDEO_PROTECT_CLEAR_ACCESS_CFG0 + (uint32_t)MC_GSC_CONFIG_REGS_SIZE);
	     index += 4U) {
		tegra_mc_write_32(index, 0);
	}

	/*
	 * Set the base. It must be 4k aligned, at least.
	 */
	assert((phys_base & (uint64_t)0xFFF) == 0U);
	tegra_mc_write_32(MC_VIDEO_PROTECT_CLEAR_BASE_LO, (uint32_t)phys_base);
	tegra_mc_write_32(MC_VIDEO_PROTECT_CLEAR_BASE_HI,
			  (uint32_t)(phys_base >> 32) & (uint32_t)MC_GSC_BASE_HI_MASK);

	/*
	 * Set the aperture size
	 *
	 * total size = (number of 128KB blocks) + (number of remaining 4KB
	 * blocks)
	 *
	 */
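	/*
	 * For example, a 0x31000-byte (196KB) aperture is encoded as one
	 * 128KB block plus seventeen 4KB blocks.
	 */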
	val = (uint32_t)((residual_4kb_blocks << MC_GSC_SIZE_RANGE_4KB_SHIFT) |
			 total_128kb_blocks);
	tegra_mc_write_32(MC_VIDEO_PROTECT_CLEAR_SIZE, (uint32_t)val);

	/*
	 * Lock the configuration settings by enabling TZ-only lock and
	 * locking the configuration against any future changes from NS
	 * world.
	 */
	tegra_mc_write_32(MC_VIDEO_PROTECT_CLEAR_CFG,
			  (uint32_t)MC_GSC_ENABLE_TZ_LOCK_BIT);

	/*
	 * MCE propagates the GSC configuration values across the
	 * CCPLEX.
	 */
}

static void tegra_unlock_videomem_nonoverlap(void)
{
	/* Clear the base */
	tegra_mc_write_32(MC_VIDEO_PROTECT_CLEAR_BASE_LO, 0);
	tegra_mc_write_32(MC_VIDEO_PROTECT_CLEAR_BASE_HI, 0);

	/* Clear the size */
	tegra_mc_write_32(MC_VIDEO_PROTECT_CLEAR_SIZE, 0);
}

static void tegra_clear_videomem(uintptr_t non_overlap_area_start,
				 unsigned long long non_overlap_area_size)
{
	int ret;

	INFO("Cleaning previous Video Memory Carveout\n");

	/*
	 * Map the NS memory first, clean it and then unmap it.
	 */
	ret = mmap_add_dynamic_region(non_overlap_area_start, /* PA */
				      non_overlap_area_start, /* VA */
				      non_overlap_area_size, /* size */
				      MT_DEVICE | MT_RW | MT_NS); /* attrs */
	assert(ret == 0);

	zeromem((void *)non_overlap_area_start, non_overlap_area_size);
	flush_dcache_range(non_overlap_area_start, non_overlap_area_size);

	ret = mmap_remove_dynamic_region(non_overlap_area_start,
					 non_overlap_area_size);
	assert(ret == 0);
}

static void tegra_clear_videomem_nonoverlap(uintptr_t phys_base,
					    unsigned long size_in_bytes)
{
	uintptr_t vmem_end_old = video_mem_base + (video_mem_size_mb << 20);
	uintptr_t vmem_end_new = phys_base + size_in_bytes;
	unsigned long long non_overlap_area_size;

	/*
	 * Clear the old regions now being exposed. The following cases
	 * can occur -
	 *
	 * 1. clear whole old region (no overlap with new region)
	 * 2. clear old sub-region below new base
	 * 3. clear old sub-region above new end
	 */
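	/*
	 * Note that cases 2 and 3 are not mutually exclusive: if the new
	 * region lies entirely within the old one, both the sub-region below
	 * the new base and the sub-region above the new end are cleared.
	 */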
	if ((phys_base > vmem_end_old) || (video_mem_base > vmem_end_new)) {
		tegra_clear_videomem(video_mem_base,
				     video_mem_size_mb << 20U);
	} else {
		if (video_mem_base < phys_base) {
			non_overlap_area_size = phys_base - video_mem_base;
			tegra_clear_videomem(video_mem_base, non_overlap_area_size);
		}
		if (vmem_end_old > vmem_end_new) {
			non_overlap_area_size = vmem_end_old - vmem_end_new;
			tegra_clear_videomem(vmem_end_new, non_overlap_area_size);
		}
	}
}

/*
 * Program the Video Memory carveout region
 *
 * phys_base = physical base of aperture
 * size_in_bytes = size of aperture in bytes
 */
void tegra_memctrl_videomem_setup(uint64_t phys_base, uint32_t size_in_bytes)
{
	/*
	 * Setup the Memory controller to restrict CPU accesses to the Video
	 * Memory region
	 */

	INFO("Configuring Video Memory Carveout\n");

	if (video_mem_base != 0U) {
		/*
		 * Lock the non overlapping memory being cleared so that
		 * other masters do not accidentally write to it. The memory
		 * would be unlocked once the non overlapping region is
		 * cleared and the new memory settings take effect.
		 */
		tegra_lock_videomem_nonoverlap(video_mem_base,
					       video_mem_size_mb << 20);
	}

	/* program the Videomem aperture */
	tegra_mc_write_32(MC_VIDEO_PROTECT_BASE_LO, (uint32_t)phys_base);
	tegra_mc_write_32(MC_VIDEO_PROTECT_BASE_HI,
			  (uint32_t)(phys_base >> 32));
	tegra_mc_write_32(MC_VIDEO_PROTECT_SIZE_MB, size_in_bytes >> 20);

	/* Redundancy check for Video Protect setting */
	assert(tegra_mc_read_32(MC_VIDEO_PROTECT_BASE_LO)
	       == (uint32_t)phys_base);
	assert(tegra_mc_read_32(MC_VIDEO_PROTECT_BASE_HI)
	       == (uint32_t)(phys_base >> 32));
	assert(tegra_mc_read_32(MC_VIDEO_PROTECT_SIZE_MB)
	       == (size_in_bytes >> 20));

	/*
	 * MCE propagates the VideoMem configuration values across the
	 * CCPLEX.
	 */
	(void)mce_update_gsc_videomem();

	/* Clear the non-overlapping memory */
	if (video_mem_base != 0U) {
		tegra_clear_videomem_nonoverlap(phys_base, size_in_bytes);
		tegra_unlock_videomem_nonoverlap();
	}

	/* store new values */
	video_mem_base = phys_base;
	video_mem_size_mb = (uint64_t)size_in_bytes >> 20;
}

/*
 * This feature exists only for v1 of the Tegra Memory Controller.
 */
void tegra_memctrl_disable_ahb_redirection(void)
{
	; /* do nothing */
}

void tegra_memctrl_clear_pending_interrupts(void)
{
	; /* do nothing */
}