// SPDX-License-Identifier: GPL-2.0+ OR BSD-3-Clause
/*
 * Copyright (C) 2018, STMicroelectronics - All Rights Reserved
 */

#include <config.h>
#include <common.h>
#include <asm/armv7.h>
#include <asm/cache.h>
#include <asm/gic.h>
#include <asm/io.h>
#include <asm/psci.h>
#include <asm/secure.h>
#include <hang.h>
#include <linux/bitops.h>

/* PWR */
#define PWR_CR3				0x0c
#define PWR_MPUCR			0x10

#define PWR_CR3_DDRSREN			BIT(10)
#define PWR_CR3_DDRRETEN		BIT(12)

#define PWR_MPUCR_PDDS			BIT(0)
#define PWR_MPUCR_CSTDBYDIS		BIT(3)
#define PWR_MPUCR_CSSF			BIT(9)

/* RCC */
#define RCC_DDRITFCR			0xd8

#define RCC_DDRITFCR_DDRC1EN		BIT(0)
#define RCC_DDRITFCR_DDRC1LPEN		BIT(1)
#define RCC_DDRITFCR_DDRC2EN		BIT(2)
#define RCC_DDRITFCR_DDRC2LPEN		BIT(3)
#define RCC_DDRITFCR_DDRPHYCEN		BIT(4)
#define RCC_DDRITFCR_DDRPHYCLPEN	BIT(5)
#define RCC_DDRITFCR_DDRCAPBEN		BIT(6)
#define RCC_DDRITFCR_DDRCAPBLPEN	BIT(7)
#define RCC_DDRITFCR_AXIDCGEN		BIT(8)
#define RCC_DDRITFCR_DDRPHYCAPBEN	BIT(9)
#define RCC_DDRITFCR_DDRPHYCAPBLPEN	BIT(10)
#define RCC_DDRITFCR_DDRCKMOD_MASK	GENMASK(22, 20)
#define RCC_DDRITFCR_GSKPCTRL		BIT(24)

#define RCC_MP_SREQSETR			0x104
#define RCC_MP_SREQCLRR			0x108

#define RCC_MP_CIER			0x414
#define RCC_MP_CIFR			0x418
#define RCC_MP_CIFR_WKUPF		BIT(20)

/* SYSCFG */
#define SYSCFG_CMPCR			0x20
#define SYSCFG_CMPCR_SW_CTRL		BIT(2)
#define SYSCFG_CMPENSETR		0x24
#define SYSCFG_CMPENCLRR		0x28
#define SYSCFG_CMPENR_MPUEN		BIT(0)

/* DDR Controller registers offsets */
#define DDRCTRL_STAT			0x004
#define DDRCTRL_PWRCTL			0x030
#define DDRCTRL_PWRTMG			0x034
#define DDRCTRL_HWLPCTL			0x038
#define DDRCTRL_DFIMISC			0x1b0
#define DDRCTRL_SWCTL			0x320
#define DDRCTRL_SWSTAT			0x324
#define DDRCTRL_PSTAT			0x3fc
#define DDRCTRL_PCTRL_0			0x490
#define DDRCTRL_PCTRL_1			0x540

/* DDR Controller Register fields */
#define DDRCTRL_STAT_OPERATING_MODE_MASK	GENMASK(2, 0)
#define DDRCTRL_STAT_OPERATING_MODE_NORMAL	0x1
#define DDRCTRL_STAT_OPERATING_MODE_SR		0x3
#define DDRCTRL_STAT_SELFREF_TYPE_MASK		GENMASK(5, 4)
#define DDRCTRL_STAT_SELFREF_TYPE_ASR		(0x3 << 4)
#define DDRCTRL_STAT_SELFREF_TYPE_SR		(0x2 << 4)

#define DDRCTRL_PWRCTL_SELFREF_EN		BIT(0)
#define DDRCTRL_PWRCTL_EN_DFI_DRAM_CLK_DISABLE	BIT(3)
#define DDRCTRL_PWRCTL_SELFREF_SW		BIT(5)

#define DDRCTRL_PWRTMG_SELFREF_TO_X32_MASK	GENMASK(23, 16)
#define DDRCTRL_PWRTMG_SELFREF_TO_X32_0		BIT(16)

#define DDRCTRL_HWLPCTL_HW_LP_EN		BIT(0)

#define DDRCTRL_DFIMISC_DFI_INIT_COMPLETE_EN	BIT(0)

#define DDRCTRL_SWCTL_SW_DONE			BIT(0)

#define DDRCTRL_SWSTAT_SW_DONE_ACK		BIT(0)

#define DDRCTRL_PSTAT_RD_PORT_BUSY_0		BIT(0)
#define DDRCTRL_PSTAT_RD_PORT_BUSY_1		BIT(1)
#define DDRCTRL_PSTAT_WR_PORT_BUSY_0		BIT(16)
#define DDRCTRL_PSTAT_WR_PORT_BUSY_1		BIT(17)

#define DDRCTRL_PCTRL_N_PORT_EN			BIT(0)

/* DDR PHY registers offsets */
#define DDRPHYC_PIR			0x004
#define DDRPHYC_PGSR			0x00c
#define DDRPHYC_ACDLLCR			0x014
#define DDRPHYC_ACIOCR			0x024
#define DDRPHYC_DXCCR			0x028
#define DDRPHYC_DSGCR			0x02c
#define DDRPHYC_ZQ0CR0			0x180
#define DDRPHYC_DX0DLLCR		0x1cc
#define DDRPHYC_DX1DLLCR		0x20c
#define DDRPHYC_DX2DLLCR		0x24c
#define DDRPHYC_DX3DLLCR		0x28c

/* DDR PHY Register fields */
#define DDRPHYC_PIR_INIT		BIT(0)
#define DDRPHYC_PIR_DLLSRST		BIT(1)
#define DDRPHYC_PIR_DLLLOCK		BIT(2)
#define DDRPHYC_PIR_ITMSRST		BIT(4)

#define DDRPHYC_PGSR_IDONE		BIT(0)

#define DDRPHYC_ACDLLCR_DLLSRST		BIT(30)
#define DDRPHYC_ACDLLCR_DLLDIS		BIT(31)

#define DDRPHYC_ACIOCR_ACOE		BIT(1)
#define DDRPHYC_ACIOCR_ACPDD		BIT(3)
#define DDRPHYC_ACIOCR_ACPDR		BIT(4)
#define DDRPHYC_ACIOCR_CKPDD_MASK	GENMASK(10, 8)
#define DDRPHYC_ACIOCR_CKPDD_0		BIT(8)
#define DDRPHYC_ACIOCR_CKPDR_MASK	GENMASK(13, 11)
#define DDRPHYC_ACIOCR_CKPDR_0		BIT(11)
#define DDRPHYC_ACIOCR_CSPDD_MASK	GENMASK(20, 18)
#define DDRPHYC_ACIOCR_CSPDD_0		BIT(18)

#define DDRPHYC_DXCCR_DXPDD		BIT(2)
#define DDRPHYC_DXCCR_DXPDR		BIT(3)

#define DDRPHYC_DSGCR_CKEPDD_MASK	GENMASK(19, 16)
#define DDRPHYC_DSGCR_CKEPDD_0		BIT(16)
#define DDRPHYC_DSGCR_ODTPDD_MASK	GENMASK(23, 20)
#define DDRPHYC_DSGCR_ODTPDD_0		BIT(20)
#define DDRPHYC_DSGCR_NL2PD		BIT(24)
#define DDRPHYC_DSGCR_CKOE		BIT(28)

#define DDRPHYC_ZQ0CRN_ZQPD		BIT(31)

#define DDRPHYC_DXNDLLCR_DLLDIS		BIT(31)

#define BOOT_API_A7_CORE0_MAGIC_NUMBER	0xca7face0
#define BOOT_API_A7_CORE1_MAGIC_NUMBER	0xca7face1

#define MPIDR_AFF0			GENMASK(7, 0)

#define RCC_MP_GRSTCSETR		(STM32_RCC_BASE + 0x0404)
#define RCC_MP_GRSTCSETR_MPSYSRST	BIT(0)
#define RCC_MP_GRSTCSETR_MPUP0RST	BIT(4)
#define RCC_MP_GRSTCSETR_MPUP1RST	BIT(5)

#define STM32MP1_PSCI_NR_CPUS		2
#if STM32MP1_PSCI_NR_CPUS > CONFIG_ARMV7_PSCI_NR_CPUS
#error "invalid value for CONFIG_ARMV7_PSCI_NR_CPUS"
#endif

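/*
 * Affinity state of each CPU, kept in secure data: CPU0 boots ON,
 * CPU1 stays OFF until it is released through PSCI CPU_ON.
 */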
u8 psci_state[STM32MP1_PSCI_NR_CPUS] __secure_data = {
	PSCI_AFFINITY_LEVEL_ON,
	PSCI_AFFINITY_LEVEL_OFF};

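/* CNTFRQ value saved by the calling CPU in CPU_ON, rewritten on the target CPU */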
static u32 __secure_data cntfrq;

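/* Accessors for the ARM generic timer frequency register CNTFRQ (CP15 c14) */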
static u32 __secure cp15_read_cntfrq(void)
{
	u32 frq;

	asm volatile("mrc p15, 0, %0, c14, c0, 0" : "=r" (frq));

	return frq;
}

static void __secure cp15_write_cntfrq(u32 frq)
{
	asm volatile ("mcr p15, 0, %0, c14, c0, 0" : : "r" (frq));
}

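/* Update the affinity state of one CPU and make the change visible to all cores */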
static inline void psci_set_state(int cpu, u8 state)
{
	psci_state[cpu] = state;
	dsb();
	isb();
}

static u32 __secure stm32mp_get_gicd_base_address(void)
{
	u32 periphbase;

	/* get the GIC base address from the CBAR register */
	asm("mrc p15, 4, %0, c15, c0, 0\n" : "=r" (periphbase));

	return (periphbase & CBAR_MASK) + GIC_DIST_OFFSET;
}

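/* Send SGI0 to the given core: BIT(cpu) << 16 fills the GICD_SGIR CPU target list */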
static void __secure stm32mp_raise_sgi0(int cpu)
{
	u32 gic_dist_addr;

	gic_dist_addr = stm32mp_get_gicd_base_address();

	/* ask cpu with SGI0 */
	writel((BIT(cpu) << 16), gic_dist_addr + GICD_SGIR);
}

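/*
 * Entered on the freshly started core: mark it ON, program the CNTFRQ
 * value saved by the caller and clear the boot magic in the TAMP register.
 */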
void __secure psci_arch_cpu_entry(void)
{
	u32 cpu = psci_get_cpu_id();

	psci_set_state(cpu, PSCI_AFFINITY_LEVEL_ON);

	/* write the saved cntfrq */
	cp15_write_cntfrq(cntfrq);

	/* reset magic in TAMP register */
	writel(0xFFFFFFFF, TAMP_BACKUP_MAGIC_NUMBER);
}

s32 __secure psci_features(u32 function_id, u32 psci_fid)
{
	switch (psci_fid) {
	case ARM_PSCI_0_2_FN_PSCI_VERSION:
	case ARM_PSCI_0_2_FN_CPU_OFF:
	case ARM_PSCI_0_2_FN_CPU_ON:
	case ARM_PSCI_0_2_FN_AFFINITY_INFO:
	case ARM_PSCI_0_2_FN_MIGRATE_INFO_TYPE:
	case ARM_PSCI_0_2_FN_SYSTEM_OFF:
	case ARM_PSCI_0_2_FN_SYSTEM_RESET:
	case ARM_PSCI_1_0_FN_SYSTEM_SUSPEND:
		return 0x0;
	}
	return ARM_PSCI_RET_NI;
}

u32 __secure psci_version(void)
{
	return ARM_PSCI_VER_1_0;
}

s32 __secure psci_affinity_info(u32 function_id, u32 target_affinity,
				u32 lowest_affinity_level)
{
	u32 cpu = target_affinity & MPIDR_AFF0;

	if (lowest_affinity_level > 0)
		return ARM_PSCI_RET_INVAL;

	if (target_affinity & ~MPIDR_AFF0)
		return ARM_PSCI_RET_INVAL;

	if (cpu >= STM32MP1_PSCI_NR_CPUS)
		return ARM_PSCI_RET_INVAL;

	return psci_state[cpu];
}

u32 __secure psci_migrate_info_type(void)
{
	/*
	 * in Power_State_Coordination_Interface_PDD_v1_1_DEN0022D.pdf
	 * return 2 = Trusted OS is either not present or does not require
	 * migration; a system of this type does not require the caller
	 * to use the MIGRATE function.
	 * MIGRATE function calls return NOT_SUPPORTED.
	 */
	return 2;
}

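/*
 * PSCI CPU_ON: pass the entry point to the target core through the TAMP
 * backup registers and signal it with SGI0 so the ROM code releases it.
 */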
s32 __secure psci_cpu_on(u32 function_id, u32 target_cpu, u32 pc,
			 u32 context_id)
{
	u32 cpu = target_cpu & MPIDR_AFF0;

	if (target_cpu & ~MPIDR_AFF0)
		return ARM_PSCI_RET_INVAL;

	if (cpu >= STM32MP1_PSCI_NR_CPUS)
		return ARM_PSCI_RET_INVAL;

	if (psci_state[cpu] == PSCI_AFFINITY_LEVEL_ON)
		return ARM_PSCI_RET_ALREADY_ON;

	/* read and save cntfrq of current cpu to write on target cpu */
	cntfrq = cp15_read_cntfrq();

	/* reset magic in TAMP register */
	if (readl(TAMP_BACKUP_MAGIC_NUMBER))
		writel(0xFFFFFFFF, TAMP_BACKUP_MAGIC_NUMBER);
	/*
	 * ROM code needs a first SGI0 after core reset;
	 * the core is ready when the magic is set to 0 by the ROM code
	 */
	while (readl(TAMP_BACKUP_MAGIC_NUMBER))
		stm32mp_raise_sgi0(cpu);

	/* store target PC and context id */
	psci_save(cpu, pc, context_id);

	/* write entrypoint in backup RAM register */
	writel((u32)&psci_cpu_entry, TAMP_BACKUP_BRANCH_ADDRESS);
	psci_set_state(cpu, PSCI_AFFINITY_LEVEL_ON_PENDING);

	/* write magic number in backup register */
	if (cpu == 0x01)
		writel(BOOT_API_A7_CORE1_MAGIC_NUMBER,
		       TAMP_BACKUP_MAGIC_NUMBER);
	else
		writel(BOOT_API_A7_CORE0_MAGIC_NUMBER,
		       TAMP_BACKUP_MAGIC_NUMBER);

	/* Generate an IT to start the core */
	stm32mp_raise_sgi0(cpu);

	return ARM_PSCI_RET_SUCCESS;
}

s32 __secure psci_cpu_off(void)
{
	u32 cpu;

	cpu = psci_get_cpu_id();

	psci_cpu_off_common();
	psci_set_state(cpu, PSCI_AFFINITY_LEVEL_OFF);

	/* reset core: wfi is managed by BootRom */
	if (cpu == 0x01)
		writel(RCC_MP_GRSTCSETR_MPUP1RST, RCC_MP_GRSTCSETR);
	else
		writel(RCC_MP_GRSTCSETR_MPUP0RST, RCC_MP_GRSTCSETR);

	/* just wait for the reset */
	while (1)
		wfi();
}

void __secure psci_system_reset(void)
{
	/* System reset */
	writel(RCC_MP_GRSTCSETR_MPSYSRST, RCC_MP_GRSTCSETR);
	/* just wait for the reset */
	while (1)
		wfi();
}

void __secure psci_system_off(void)
{
	/* System Off is not managed, wait for user power off
	 * TODO: handle I2C write in PMIC Main Control register bit 0 = SWOFF
	 */
	while (1)
		wfi();
}

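/*
 * Busy-wait for the requested number of microseconds, timed with the
 * 64-bit generic timer counter (CNTPCT).
 */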
static void __secure secure_udelay(unsigned int delay)
{
	u32 freq = cp15_read_cntfrq() / 1000000;
	u64 start, end;

	delay *= freq;

	asm volatile("mrrc p15, 0, %Q0, %R0, c14" : "=r" (start));
	for (;;) {
		asm volatile("mrrc p15, 0, %Q0, %R0, c14" : "=r" (end));
		if ((end - start) > delay)
			break;
	}
}

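/*
 * Poll a register until all bits of @val (within @mask) are set;
 * return 0 on success or -ETIMEDOUT after roughly 500 us.
 */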
static int __secure secure_waitbits(u32 reg, u32 mask, u32 val)
{
	u32 freq = cp15_read_cntfrq() / 1000000;
	u32 delay = 500 * freq; /* 500 us */
	u64 start, end;
	u32 tmp;

	asm volatile("mrrc p15, 0, %Q0, %R0, c14" : "=r" (start));
	for (;;) {
		tmp = readl(reg);
		tmp &= mask;
		if ((tmp & val) == val)
			return 0;
		asm volatile("mrrc p15, 0, %Q0, %R0, c14" : "=r" (end));
		if ((end - start) > delay)
			return -ETIMEDOUT;
	}
}

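/*
 * Prepare the DDR controller for software self-refresh: force the DDR
 * interface clocks on, disable the uMCTL2 hardware low-power interface
 * and automatic self-refresh, and save PWRCTL so the previous low-power
 * setup can be restored on resume.
 */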
static void __secure ddr_sr_mode_ssr(u32 *saved_pwrctl)
{
	setbits_le32(STM32_RCC_BASE + RCC_DDRITFCR,
		     RCC_DDRITFCR_DDRC1LPEN | RCC_DDRITFCR_DDRC1EN |
		     RCC_DDRITFCR_DDRC2LPEN | RCC_DDRITFCR_DDRC2EN |
		     RCC_DDRITFCR_DDRCAPBLPEN | RCC_DDRITFCR_DDRPHYCAPBLPEN |
		     RCC_DDRITFCR_DDRCAPBEN | RCC_DDRITFCR_DDRPHYCAPBEN |
		     RCC_DDRITFCR_DDRPHYCEN);

	clrbits_le32(STM32_RCC_BASE + RCC_DDRITFCR,
		     RCC_DDRITFCR_AXIDCGEN | RCC_DDRITFCR_DDRCKMOD_MASK);

	/* Disable HW LP interface of uMCTL2 */
	clrbits_le32(STM32_DDRCTRL_BASE + DDRCTRL_HWLPCTL,
		     DDRCTRL_HWLPCTL_HW_LP_EN);

	/* Configure Automatic LP modes of uMCTL2 */
	clrsetbits_le32(STM32_DDRCTRL_BASE + DDRCTRL_PWRTMG,
			DDRCTRL_PWRTMG_SELFREF_TO_X32_MASK,
			DDRCTRL_PWRTMG_SELFREF_TO_X32_0);

	/* Save PWRCTL register to restart ASR after suspend (if applicable) */
	*saved_pwrctl = readl(STM32_DDRCTRL_BASE + DDRCTRL_PWRCTL);

	/*
	 * Disable Clock disable with LP modes
	 * (used in RUN mode for LPDDR2 with specific timing).
	 */
	clrbits_le32(STM32_DDRCTRL_BASE + DDRCTRL_PWRCTL,
		     DDRCTRL_PWRCTL_EN_DFI_DRAM_CLK_DISABLE);

	/* Disable automatic Self-Refresh mode */
	clrbits_le32(STM32_DDRCTRL_BASE + DDRCTRL_PWRCTL,
		     DDRCTRL_PWRCTL_SELFREF_EN);
}

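/* Restore the PWRCTL low-power bits (ASR, DFI clock disable) saved before suspend */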
static void __secure ddr_sr_mode_restore(u32 saved_pwrctl)
{
	saved_pwrctl &= DDRCTRL_PWRCTL_EN_DFI_DRAM_CLK_DISABLE |
			DDRCTRL_PWRCTL_SELFREF_EN;

	/* Restore ASR mode in case it was enabled before suspend. */
	setbits_le32(STM32_DDRCTRL_BASE + DDRCTRL_PWRCTL, saved_pwrctl);
}

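/*
 * Enter software self-refresh: drain the AXI ports, request self-refresh
 * through PWRCTL, power down the PHY I/Os and DLLs and enable DDR pad
 * retention, so that the DRAM content is preserved across Stop mode.
 */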
static int __secure ddr_sw_self_refresh_in(void)
{
	int ret;

	clrbits_le32(STM32_RCC_BASE + RCC_DDRITFCR, RCC_DDRITFCR_AXIDCGEN);

	/* Block AXI ports from taking any more transactions */
	clrbits_le32(STM32_DDRCTRL_BASE + DDRCTRL_PCTRL_0,
		     DDRCTRL_PCTRL_N_PORT_EN);
	clrbits_le32(STM32_DDRCTRL_BASE + DDRCTRL_PCTRL_1,
		     DDRCTRL_PCTRL_N_PORT_EN);

	/*
	 * Wait until all AXI ports are idle
	 * Poll PSTAT.rd_port_busy_n = 0
	 * Poll PSTAT.wr_port_busy_n = 0
	 */
	ret = secure_waitbits(STM32_DDRCTRL_BASE + DDRCTRL_PSTAT,
			      DDRCTRL_PSTAT_RD_PORT_BUSY_0 |
			      DDRCTRL_PSTAT_RD_PORT_BUSY_1 |
			      DDRCTRL_PSTAT_WR_PORT_BUSY_0 |
			      DDRCTRL_PSTAT_WR_PORT_BUSY_1, 0);
	if (ret)
		goto pstat_failed;

	/* SW Self-Refresh entry */
	setbits_le32(STM32_DDRCTRL_BASE + DDRCTRL_PWRCTL, DDRCTRL_PWRCTL_SELFREF_SW);

	/*
	 * Wait for the operating mode to change to self-refresh,
	 * i.e. STAT.operating_mode[1:0] == 11.
	 * Ensure the transition to self-refresh was caused by software
	 * by also checking that STAT.selfref_type[1:0] = 2.
	 */
	ret = secure_waitbits(STM32_DDRCTRL_BASE + DDRCTRL_STAT,
			      DDRCTRL_STAT_OPERATING_MODE_MASK |
			      DDRCTRL_STAT_SELFREF_TYPE_MASK,
			      DDRCTRL_STAT_OPERATING_MODE_SR |
			      DDRCTRL_STAT_SELFREF_TYPE_SR);
	if (ret)
		goto selfref_sw_failed;

	/* IOs powering down (PUBL registers) */
	setbits_le32(STM32_DDRPHYC_BASE + DDRPHYC_ACIOCR, DDRPHYC_ACIOCR_ACPDD);
	setbits_le32(STM32_DDRPHYC_BASE + DDRPHYC_ACIOCR, DDRPHYC_ACIOCR_ACPDR);

	clrsetbits_le32(STM32_DDRPHYC_BASE + DDRPHYC_ACIOCR,
			DDRPHYC_ACIOCR_CKPDD_MASK,
			DDRPHYC_ACIOCR_CKPDD_0);

	clrsetbits_le32(STM32_DDRPHYC_BASE + DDRPHYC_ACIOCR,
			DDRPHYC_ACIOCR_CKPDR_MASK,
			DDRPHYC_ACIOCR_CKPDR_0);

	clrsetbits_le32(STM32_DDRPHYC_BASE + DDRPHYC_ACIOCR,
			DDRPHYC_ACIOCR_CSPDD_MASK,
			DDRPHYC_ACIOCR_CSPDD_0);

	/* Disable command/address output driver */
	clrbits_le32(STM32_DDRPHYC_BASE + DDRPHYC_ACIOCR, DDRPHYC_ACIOCR_ACOE);

	setbits_le32(STM32_DDRPHYC_BASE + DDRPHYC_DXCCR, DDRPHYC_DXCCR_DXPDD);

	setbits_le32(STM32_DDRPHYC_BASE + DDRPHYC_DXCCR, DDRPHYC_DXCCR_DXPDR);

	clrsetbits_le32(STM32_DDRPHYC_BASE + DDRPHYC_DSGCR,
			DDRPHYC_DSGCR_ODTPDD_MASK,
			DDRPHYC_DSGCR_ODTPDD_0);

	setbits_le32(STM32_DDRPHYC_BASE + DDRPHYC_DSGCR, DDRPHYC_DSGCR_NL2PD);

	clrsetbits_le32(STM32_DDRPHYC_BASE + DDRPHYC_DSGCR,
			DDRPHYC_DSGCR_CKEPDD_MASK,
			DDRPHYC_DSGCR_CKEPDD_0);

	/* Disable PZQ cell (PUBL register) */
	setbits_le32(STM32_DDRPHYC_BASE + DDRPHYC_ZQ0CR0, DDRPHYC_ZQ0CRN_ZQPD);

	/* Set latch */
	clrbits_le32(STM32_DDRPHYC_BASE + DDRPHYC_DSGCR, DDRPHYC_DSGCR_CKOE);

	/* Additional delay to avoid early latch */
	secure_udelay(10);

	/* Activate sw retention in PWRCTRL */
	setbits_le32(STM32_PWR_BASE + PWR_CR3, PWR_CR3_DDRRETEN);

	/* Switch controller clocks (uMCTL2/PUBL) to DLL ref clock */
	setbits_le32(STM32_RCC_BASE + RCC_DDRITFCR, RCC_DDRITFCR_GSKPCTRL);

	/* Disable all DLLs: GLITCH window */
	setbits_le32(STM32_DDRPHYC_BASE + DDRPHYC_ACDLLCR, DDRPHYC_ACDLLCR_DLLDIS);

	setbits_le32(STM32_DDRPHYC_BASE + DDRPHYC_DX0DLLCR, DDRPHYC_DXNDLLCR_DLLDIS);

	setbits_le32(STM32_DDRPHYC_BASE + DDRPHYC_DX1DLLCR, DDRPHYC_DXNDLLCR_DLLDIS);

	setbits_le32(STM32_DDRPHYC_BASE + DDRPHYC_DX2DLLCR, DDRPHYC_DXNDLLCR_DLLDIS);

	setbits_le32(STM32_DDRPHYC_BASE + DDRPHYC_DX3DLLCR, DDRPHYC_DXNDLLCR_DLLDIS);

	/* Switch controller clocks (uMCTL2/PUBL) to DLL output clock */
	clrbits_le32(STM32_RCC_BASE + RCC_DDRITFCR, RCC_DDRITFCR_GSKPCTRL);

	/* Deactivate all DDR clocks */
	clrbits_le32(STM32_RCC_BASE + RCC_DDRITFCR,
		     RCC_DDRITFCR_DDRC1EN | RCC_DDRITFCR_DDRC2EN |
		     RCC_DDRITFCR_DDRCAPBEN | RCC_DDRITFCR_DDRPHYCAPBEN);

	return 0;

selfref_sw_failed:
	/* This bit should be cleared to restore DDR in its previous state */
	clrbits_le32(STM32_DDRCTRL_BASE + DDRCTRL_PWRCTL,
		     DDRCTRL_PWRCTL_SELFREF_SW);

pstat_failed:
	setbits_le32(STM32_DDRCTRL_BASE + DDRCTRL_PCTRL_0,
		     DDRCTRL_PCTRL_N_PORT_EN);
	setbits_le32(STM32_DDRCTRL_BASE + DDRCTRL_PCTRL_1,
		     DDRCTRL_PCTRL_N_PORT_EN);

	return -EINVAL;
}

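/*
 * Leave software self-refresh: re-enable the DDR clocks and DLLs, rerun
 * the PHY partial init (DLL lock + ITM reset), release the I/O retention
 * and unblock the AXI ports; any failure here is fatal, hence hang().
 */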
static void __secure ddr_sw_self_refresh_exit(void)
{
	int ret;

	/* Enable all clocks */
	setbits_le32(STM32_RCC_BASE + RCC_DDRITFCR,
		     RCC_DDRITFCR_DDRC1EN | RCC_DDRITFCR_DDRC2EN |
		     RCC_DDRITFCR_DDRPHYCEN | RCC_DDRITFCR_DDRPHYCAPBEN |
		     RCC_DDRITFCR_DDRCAPBEN);

	/* Handshake */
	clrbits_le32(STM32_DDRCTRL_BASE + DDRCTRL_SWCTL, DDRCTRL_SWCTL_SW_DONE);

	/* Mask dfi_init_complete_en */
	clrbits_le32(STM32_DDRCTRL_BASE + DDRCTRL_DFIMISC,
		     DDRCTRL_DFIMISC_DFI_INIT_COMPLETE_EN);

	/* Ack */
	setbits_le32(STM32_DDRCTRL_BASE + DDRCTRL_SWCTL, DDRCTRL_SWCTL_SW_DONE);
	ret = secure_waitbits(STM32_DDRCTRL_BASE + DDRCTRL_SWSTAT,
			      DDRCTRL_SWSTAT_SW_DONE_ACK,
			      DDRCTRL_SWSTAT_SW_DONE_ACK);
	if (ret)
		hang();

	/* Switch controller clocks (uMCTL2/PUBL) to DLL ref clock */
	setbits_le32(STM32_RCC_BASE + RCC_DDRITFCR, RCC_DDRITFCR_GSKPCTRL);

	/* Enable all DLLs: GLITCH window */
	clrbits_le32(STM32_DDRPHYC_BASE + DDRPHYC_ACDLLCR,
		     DDRPHYC_ACDLLCR_DLLDIS);

	clrbits_le32(STM32_DDRPHYC_BASE + DDRPHYC_DX0DLLCR, DDRPHYC_DXNDLLCR_DLLDIS);

	clrbits_le32(STM32_DDRPHYC_BASE + DDRPHYC_DX1DLLCR, DDRPHYC_DXNDLLCR_DLLDIS);

	clrbits_le32(STM32_DDRPHYC_BASE + DDRPHYC_DX2DLLCR, DDRPHYC_DXNDLLCR_DLLDIS);

	clrbits_le32(STM32_DDRPHYC_BASE + DDRPHYC_DX3DLLCR, DDRPHYC_DXNDLLCR_DLLDIS);

	/* Additional delay to avoid early DLL clock switch */
	secure_udelay(50);

	/* Switch controller clocks (uMCTL2/PUBL) back to DLL output clock */
	clrbits_le32(STM32_RCC_BASE + RCC_DDRITFCR, RCC_DDRITFCR_GSKPCTRL);

	clrbits_le32(STM32_DDRPHYC_BASE + DDRPHYC_ACDLLCR, DDRPHYC_ACDLLCR_DLLSRST);

	secure_udelay(10);

	setbits_le32(STM32_DDRPHYC_BASE + DDRPHYC_ACDLLCR, DDRPHYC_ACDLLCR_DLLSRST);

	/* PHY partial init: (DLL lock and ITM reset) */
	writel(DDRPHYC_PIR_DLLSRST | DDRPHYC_PIR_DLLLOCK |
	       DDRPHYC_PIR_ITMSRST | DDRPHYC_PIR_INIT,
	       STM32_DDRPHYC_BASE + DDRPHYC_PIR);

	/* Need to wait at least 10 clock cycles before accessing PGSR */
	secure_udelay(1);

	/* Poll for end of init */
	ret = secure_waitbits(STM32_DDRPHYC_BASE + DDRPHYC_PGSR,
			      DDRPHYC_PGSR_IDONE, DDRPHYC_PGSR_IDONE);
	if (ret)
		hang();

	/* Handshake */
	clrbits_le32(STM32_DDRCTRL_BASE + DDRCTRL_SWCTL, DDRCTRL_SWCTL_SW_DONE);

	/* Unmask dfi_init_complete_en to uMCTL2 */
	setbits_le32(STM32_DDRCTRL_BASE + DDRCTRL_DFIMISC, DDRCTRL_DFIMISC_DFI_INIT_COMPLETE_EN);

	/* Ack */
	setbits_le32(STM32_DDRCTRL_BASE + DDRCTRL_SWCTL, DDRCTRL_SWCTL_SW_DONE);
	ret = secure_waitbits(STM32_DDRCTRL_BASE + DDRCTRL_SWSTAT,
			      DDRCTRL_SWSTAT_SW_DONE_ACK,
			      DDRCTRL_SWSTAT_SW_DONE_ACK);
	if (ret)
		hang();

	/* Deactivate sw retention in PWR */
	clrbits_le32(STM32_PWR_BASE + PWR_CR3, PWR_CR3_DDRRETEN);

	/* Enable PZQ cell (PUBL register) */
	clrbits_le32(STM32_DDRPHYC_BASE + DDRPHYC_ZQ0CR0, DDRPHYC_ZQ0CRN_ZQPD);

	/* Enable pad drivers */
	clrbits_le32(STM32_DDRPHYC_BASE + DDRPHYC_ACIOCR, DDRPHYC_ACIOCR_ACPDD);

	/* Enable command/address output driver */
	setbits_le32(STM32_DDRPHYC_BASE + DDRPHYC_ACIOCR, DDRPHYC_ACIOCR_ACOE);

	clrbits_le32(STM32_DDRPHYC_BASE + DDRPHYC_ACIOCR, DDRPHYC_ACIOCR_CKPDD_MASK);

	clrbits_le32(STM32_DDRPHYC_BASE + DDRPHYC_ACIOCR, DDRPHYC_ACIOCR_CSPDD_MASK);

	clrbits_le32(STM32_DDRPHYC_BASE + DDRPHYC_DXCCR, DDRPHYC_DXCCR_DXPDD);

	clrbits_le32(STM32_DDRPHYC_BASE + DDRPHYC_DXCCR, DDRPHYC_DXCCR_DXPDR);

	/* Release latch */
	setbits_le32(STM32_DDRPHYC_BASE + DDRPHYC_DSGCR, DDRPHYC_DSGCR_CKOE);

	clrbits_le32(STM32_DDRPHYC_BASE + DDRPHYC_DSGCR, DDRPHYC_DSGCR_ODTPDD_MASK);

	clrbits_le32(STM32_DDRPHYC_BASE + DDRPHYC_DSGCR, DDRPHYC_DSGCR_NL2PD);

	clrbits_le32(STM32_DDRPHYC_BASE + DDRPHYC_DSGCR, DDRPHYC_DSGCR_CKEPDD_MASK);

	/* Remove selfrefresh */
	clrbits_le32(STM32_DDRCTRL_BASE + DDRCTRL_PWRCTL, DDRCTRL_PWRCTL_SELFREF_SW);

	/* Wait operating_mode == normal */
	ret = secure_waitbits(STM32_DDRCTRL_BASE + DDRCTRL_STAT,
			      DDRCTRL_STAT_OPERATING_MODE_MASK,
			      DDRCTRL_STAT_OPERATING_MODE_NORMAL);
	if (ret)
		hang();

	/* AXI ports are no longer blocked from taking transactions */
	setbits_le32(STM32_DDRCTRL_BASE + DDRCTRL_PCTRL_0, DDRCTRL_PCTRL_N_PORT_EN);
	setbits_le32(STM32_DDRCTRL_BASE + DDRCTRL_PCTRL_1, DDRCTRL_PCTRL_N_PORT_EN);

	setbits_le32(STM32_RCC_BASE + RCC_DDRITFCR, RCC_DDRITFCR_AXIDCGEN);
}

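/*
 * PSCI SYSTEM_SUSPEND: switch the I/O compensation cell to software
 * control, put the DDR in self-refresh and request Stop mode through
 * RCC_MP_SREQSETR before entering WFI; execution resumes here on wakeup
 * and the DDR and compensation settings are restored.
 */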
void __secure psci_system_suspend(u32 __always_unused function_id,
				  u32 ep, u32 context_id)
{
	u32 saved_pwrctl, reg;

	/* Disable IO compensation */

	/* Place current APSRC/ANSRC into RAPSRC/RANSRC */
	reg = readl(STM32_SYSCFG_BASE + SYSCFG_CMPCR);
	reg >>= 8;
	reg &= 0xff << 16;
	reg |= SYSCFG_CMPCR_SW_CTRL;
	writel(reg, STM32_SYSCFG_BASE + SYSCFG_CMPCR);
	writel(SYSCFG_CMPENR_MPUEN, STM32_SYSCFG_BASE + SYSCFG_CMPENCLRR);

	writel(RCC_MP_CIFR_WKUPF, STM32_RCC_BASE + RCC_MP_CIFR);
	setbits_le32(STM32_RCC_BASE + RCC_MP_CIER, RCC_MP_CIFR_WKUPF);

	setbits_le32(STM32_PWR_BASE + PWR_MPUCR,
		     PWR_MPUCR_CSSF | PWR_MPUCR_CSTDBYDIS | PWR_MPUCR_PDDS);

	psci_v7_flush_dcache_all();
	ddr_sr_mode_ssr(&saved_pwrctl);
	ddr_sw_self_refresh_in();
	setbits_le32(STM32_PWR_BASE + PWR_CR3, PWR_CR3_DDRSREN);
	writel(0x3, STM32_RCC_BASE + RCC_MP_SREQSETR);

	/* Zzz, enter stop mode */
	asm volatile(
		"isb\n"
		"dsb\n"
		"wfi\n");

	writel(0x3, STM32_RCC_BASE + RCC_MP_SREQCLRR);
	ddr_sw_self_refresh_exit();
	ddr_sr_mode_restore(saved_pwrctl);

	writel(SYSCFG_CMPENR_MPUEN, STM32_SYSCFG_BASE + SYSCFG_CMPENSETR);
	clrbits_le32(STM32_SYSCFG_BASE + SYSCFG_CMPCR, SYSCFG_CMPCR_SW_CTRL);
729}