Tom Rini | 10e4779 | 2018-05-06 17:58:06 -0400 | [diff] [blame] | 1 | /* SPDX-License-Identifier: GPL-2.0+ */ |
Aneesh V | 960f5c0 | 2011-06-16 23:30:47 +0000 | [diff] [blame] | 2 | /* |
| 3 | * (C) Copyright 2010 |
| 4 | * Texas Instruments, <www.ti.com> |
| 5 | * Aneesh V <aneesh@ti.com> |
Aneesh V | 960f5c0 | 2011-06-16 23:30:47 +0000 | [diff] [blame] | 6 | */ |
| 7 | #ifndef ARMV7_H |
| 8 | #define ARMV7_H |
Aneesh V | 960f5c0 | 2011-06-16 23:30:47 +0000 | [diff] [blame] | 9 | |
/*
 * Cortex-A9 revisions — raw MIDR (Main ID Register) values.
 * Encoding visible in the hex: 0x41 implementer (ARM), variant ("rN") in
 * bits [23:20], part number 0xC09, minor revision ("pN") in bits [3:0].
 */
#define MIDR_CORTEX_A9_R0P1	0x410FC091
#define MIDR_CORTEX_A9_R1P2	0x411FC092
#define MIDR_CORTEX_A9_R1P3	0x411FC093
#define MIDR_CORTEX_A9_R2P10	0x412FC09A

/* Cortex-A15 revisions (part number 0xC0F) */
#define MIDR_CORTEX_A15_R0P0	0x410FC0F0
#define MIDR_CORTEX_A15_R2P2	0x412FC0F2

/* Cortex-A7 revisions (part number 0xC07) */
#define MIDR_CORTEX_A7_R0P0	0x410FC070

/*
 * AND with MIDR to clear the variant (bits [23:20]) and revision
 * (bits [3:0]) fields, leaving implementer/architecture/part for
 * revision-independent CPU matching.
 */
#define MIDR_PRIMARY_PART_MASK	0xFF0FFFF0

/* ID_PFR1 feature fields (each a 4-bit field at the given shift) */
#define CPUID_ARM_SEC_SHIFT		4
#define CPUID_ARM_SEC_MASK		(0xF << CPUID_ARM_SEC_SHIFT)
#define CPUID_ARM_VIRT_SHIFT		12
#define CPUID_ARM_VIRT_MASK		(0xF << CPUID_ARM_VIRT_SHIFT)
#define CPUID_ARM_GENTIMER_SHIFT	16
#define CPUID_ARM_GENTIMER_MASK	(0xF << CPUID_ARM_GENTIMER_SHIFT)

/* valid bits in CBAR register / PERIPHBASE value */
#define CBAR_MASK			0xFFFF8000
| 35 | |
/*
 * CCSIDR (Cache Size ID Register) field extraction.
 * Masks are pre-shifted: extract a field as (ccsidr & MASK) >> OFFSET.
 * Build the masks from the *_OFFSET macros instead of repeating the
 * shift amounts as magic literals (matches the ID_PFR1 section style).
 */
#define CCSIDR_LINE_SIZE_OFFSET		0
#define CCSIDR_LINE_SIZE_MASK		0x7
#define CCSIDR_ASSOCIATIVITY_OFFSET	3
#define CCSIDR_ASSOCIATIVITY_MASK	(0x3FF << CCSIDR_ASSOCIATIVITY_OFFSET)
#define CCSIDR_NUM_SETS_OFFSET		13
#define CCSIDR_NUM_SETS_MASK		(0x7FFF << CCSIDR_NUM_SETS_OFFSET)
| 43 | |
/*
 * Values for InD field in CSSELR
 * Selects the type of cache
 */
#define ARMV7_CSSELR_IND_DATA_UNIFIED	0
#define ARMV7_CSSELR_IND_INSTRUCTION	1

/*
 * Values for Ctype fields in CLIDR — one 3-bit field per cache level,
 * describing which cache(s) are implemented at that level.
 */
#define ARMV7_CLIDR_CTYPE_NO_CACHE		0
#define ARMV7_CLIDR_CTYPE_INSTRUCTION_ONLY	1
#define ARMV7_CLIDR_CTYPE_DATA_ONLY		2
#define ARMV7_CLIDR_CTYPE_INSTRUCTION_DATA	3
#define ARMV7_CLIDR_CTYPE_UNIFIED		4
| 57 | |
Andre Przywara | a6bb668 | 2013-09-19 18:06:39 +0200 | [diff] [blame] | 58 | #ifndef __ASSEMBLY__ |
| 59 | #include <linux/types.h> |
Tom Rini | e968973 | 2015-03-02 08:24:45 -0500 | [diff] [blame] | 60 | #include <asm/io.h> |
Andre Przywara | e996bc6 | 2016-05-12 12:14:41 +0100 | [diff] [blame] | 61 | #include <asm/barriers.h> |
Valentine Barshak | 689bfa2 | 2015-03-20 18:16:17 +0300 | [diff] [blame] | 62 | |
Jagan Teki | 71d71fe | 2017-09-27 23:03:10 +0530 | [diff] [blame] | 63 | /* read L2 control register (L2CTLR) */ |
| 64 | static inline uint32_t read_l2ctlr(void) |
| 65 | { |
| 66 | uint32_t val = 0; |
| 67 | |
| 68 | asm volatile ("mrc p15, 1, %0, c9, c0, 2" : "=r" (val)); |
| 69 | |
| 70 | return val; |
| 71 | } |
| 72 | |
/*
 * Write the L2 control register (L2CTLR).
 * @val: value to program into L2CTLR.
 */
static inline void write_l2ctlr(uint32_t val)
{
	/*
	 * Note: L2CTLR can only be written when the L2 memory system
	 * is idle, ie before the MMU is enabled.
	 */
	asm volatile("mcr p15, 1, %0, c9, c0, 2" : : "r" (val) : "memory");
	/* Synchronize: make the register write take effect before returning */
	isb();
}
| 83 | |
Akshay Saraswat | e5be413 | 2015-02-20 13:27:13 +0530 | [diff] [blame] | 84 | /* |
| 85 | * Workaround for ARM errata # 798870 |
| 86 | * Set L2ACTLR[7] to reissue any memory transaction in the L2 that has been |
| 87 | * stalled for 1024 cycles to verify that its hazard condition still exists. |
| 88 | */ |
| 89 | static inline void v7_enable_l2_hazard_detect(void) |
| 90 | { |
| 91 | uint32_t val; |
| 92 | |
| 93 | /* L2ACTLR[7]: Enable hazard detect timeout */ |
| 94 | asm volatile ("mrc p15, 1, %0, c15, c0, 0\n\t" : "=r"(val)); |
| 95 | val |= (1 << 7); |
| 96 | asm volatile ("mcr p15, 1, %0, c15, c0, 0\n\t" : : "r"(val)); |
| 97 | } |
| 98 | |
/*
 * Workaround for ARM errata # 799270
 * Ensure that the L2 logic has been used within the previous 256 cycles
 * before modifying the ACTLR.SMP bit. This is required during boot before
 * MMU has been enabled, or during a specified reset or power down sequence.
 *
 * @address: address to read as the dummy L2 access; presumably must map
 *           to L2-backed memory for the errata workaround to be
 *           effective — confirm against callers.
 */
static inline void v7_enable_smp(uint32_t address)
{
	uint32_t temp, val;

	/* Read auxiliary control register (ACTLR) */
	asm volatile ("mrc p15, 0, %0, c1, c0, 1\n\t" : "=r"(val));

	/* Enable SMP: set ACTLR bit 6 */
	val |= (1 << 6);

	/* Dummy read to assure L2 access */
	temp = readl(address);
	/*
	 * NOTE(review): folding the (zeroed) read result into val looks like
	 * it exists to keep the dummy read ordered before the ACTLR write
	 * and not optimized away — preserve this statement order.
	 */
	temp &= 0;
	val |= temp;

	/* Write auxiliary control register */
	asm volatile ("mcr p15, 0, %0, c1, c0, 1\n\t" : : "r"(val));

	/* Data + instruction barriers so the SMP change takes effect */
	CP15DSB;
	CP15ISB;
}
| 126 | |
/* Hazard-detect enable hook (errata #798870); implemented outside this file */
void v7_en_l2_hazard_detect(void);
/*
 * Outer (L2) cache maintenance hooks, implemented elsewhere
 * (platform-specific). Range variants take start/end addresses.
 */
void v7_outer_cache_enable(void);
void v7_outer_cache_disable(void);
void v7_outer_cache_flush_all(void);
void v7_outer_cache_inval_all(void);
void v7_outer_cache_flush_range(u32 start, u32 end);
void v7_outer_cache_inval_range(u32 start, u32 end);
| 134 | |
#ifdef CONFIG_ARMV7_NONSEC

/* Non-secure world setup; implementations live outside this file */
int armv7_init_nonsec(void);
/* Adjusts *start/*size in place to carve out secure memory */
int armv7_apply_memory_carveout(u64 *start, u64 *size);
bool armv7_boot_nonsec(void);

/* defined in assembly file */
unsigned int _nonsec_init(void);
/* Enter target_pc in non-secure state with r0-r2 as arguments */
void _do_nonsec_entry(void *target_pc, unsigned long r0,
		      unsigned long r1, unsigned long r2);
void _smp_pen(void);

/* Linker-provided bounds of the secure text/data and secure stack regions */
extern char __secure_start[];
extern char __secure_end[];
extern char __secure_stack_start[];
extern char __secure_stack_end[];

#endif /* CONFIG_ARMV7_NONSEC */

/*
 * CP15 tuning hooks, implemented per-arch/SoC; the MIDR-derived
 * arguments let implementations apply settings per CPU revision.
 */
void v7_arch_cp15_set_l2aux_ctrl(u32 l2auxctrl, u32 cpu_midr,
				 u32 cpu_rev_comb, u32 cpu_variant,
				 u32 cpu_rev);
void v7_arch_cp15_set_acr(u32 acr, u32 cpu_midr, u32 cpu_rev_comb,
			  u32 cpu_variant, u32 cpu_rev);
void v7_arch_cp15_allow_unaligned(void);
Andre Przywara | a6bb668 | 2013-09-19 18:06:39 +0200 | [diff] [blame] | 160 | #endif /* ! __ASSEMBLY__ */ |
| 161 | |
Aneesh V | 960f5c0 | 2011-06-16 23:30:47 +0000 | [diff] [blame] | 162 | #endif |