/*
 * Copyright 2020-2022 NXP
 *
 * SPDX-License-Identifier: BSD-3-Clause
 *
 */

#include <asm_macros.S>
#include <dcfg_lsch2.h>
#include <nxp_timer.h>
#include <plat_gic.h>
#include <scfg.h>

#include <bl31_data.h>
#include <plat_psci.h>
#include <platform_def.h>

#define DAIF_DATA	AUX_01_DATA
#define TIMER_CNTRL_DATA	AUX_02_DATA

.global soc_init_lowlevel
.global soc_init_percpu
.global _soc_core_release
.global _soc_core_restart
.global _soc_ck_disabled
.global _soc_sys_reset
.global _soc_sys_off
.global _soc_set_start_addr
.global _getGICC_BaseAddr
.global _getGICD_BaseAddr
.global _soc_core_prep_off
.global _soc_core_entr_off
.global _soc_core_exit_off
.global _soc_core_prep_stdby
.global _soc_core_entr_stdby
.global _soc_core_exit_stdby
.global _soc_core_prep_pwrdn
.global _soc_core_entr_pwrdn
.global _soc_core_exit_pwrdn
.global _soc_clstr_prep_stdby
.global _soc_clstr_exit_stdby
.global _soc_clstr_prep_pwrdn
.global _soc_clstr_exit_pwrdn
.global _soc_sys_prep_stdby
.global _soc_sys_exit_stdby
.global _soc_sys_prep_pwrdn
.global _soc_sys_pwrdn_wfi
.global _soc_sys_exit_pwrdn


/* This function initializes the soc
 * in: void
 * out: void
 */
func soc_init_lowlevel
	ret
endfunc soc_init_lowlevel


/* void soc_init_percpu(void)
 * this function performs any soc-specific initialization that is needed on
 * a per-core basis
 * in: none
 * out: none
 * uses x0, x1, x2, x3
 */
func soc_init_percpu
	mov x3, x30

	bl plat_my_core_mask
	mov x2, x0

	/* see if this core is marked for prefetch disable */
	mov x0, #PREFETCH_DIS_OFFSET
	bl _get_global_data /* 0-1 */
	tst x0, x2
	b.eq 1f
	bl _disable_ldstr_pfetch_A72 /* 0 */
1:
	mov x30, x3
	ret
endfunc soc_init_percpu

/* part of CPU_ON
 * this function releases a secondary core from reset
 * in: x0 = core_mask_lsb
 * out: none
 * uses: x0, x1, x2, x3
 */
func _soc_core_release

#if (TEST_BL31)
	rbit w2, w0
	/* x2 = core mask msb */
#else
	mov x2, x0
#endif
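	/*
	 * SCFG and DCFG registers are big-endian, so the core mask is
	 * byte-swapped with rev before each store below.
	 */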
	/* write COREBCR */
	mov x1, #NXP_SCFG_ADDR
	rev w3, w2
	str w3, [x1, #SCFG_COREBCR_OFFSET]
	isb

	/* read-modify-write BRR */
	mov x1, #NXP_DCFG_ADDR
	ldr w2, [x1, #DCFG_BRR_OFFSET]
	rev w3, w2
	orr w3, w3, w0
	rev w2, w3
	str w2, [x1, #DCFG_BRR_OFFSET]
	isb

	/* send event */
	sev
	isb
	ret
endfunc _soc_core_release


/* part of CPU_ON
 * this function restarts a core shut down via _soc_core_entr_off
 * in: x0 = core mask lsb (of the target cpu)
 * out: x0 == 0, on success
 * x0 != 0, on failure
 * uses x0, x1, x2, x3, x4, x5
 */
func _soc_core_restart
	mov x5, x30
	mov x3, x0

	/*
	 * unset the ph20 request in RCPM_PCPH20CLRR
	 * this is an lsb-0 register
	 */
	ldr x1, =NXP_RCPM_ADDR
	rev w2, w3
	str w2, [x1, #RCPM_PCPH20CLRR_OFFSET]
	dsb sy
	isb

	bl _getGICD_BaseAddr
	mov x4, x0

	/* enable forwarding of group 0 interrupts by setting GICD_CTLR[0] = 1 */
	ldr w1, [x4, #GICD_CTLR_OFFSET]
	orr w1, w1, #GICD_CTLR_EN_GRP0
	str w1, [x4, #GICD_CTLR_OFFSET]
	dsb sy
	isb


	/*
	 * fire SGI by writing to GICD_SGIR the following values:
	 * [25:24] = 0x0 (forward interrupt to the CPU interfaces
	 * specified in CPUTargetList field)
	 * [23:16] = core mask lsb[7:0] (forward interrupt to target cpu)
	 * [15] = 0 (forward SGI only if it is configured as group 0 interrupt)
	 * [3:0] = 0xF (interrupt ID = 15)
	 */
	lsl w1, w3, #16
	orr w1, w1, #0xF
	str w1, [x4, #GICD_SGIR_OFFSET]
	dsb sy
	isb
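	/* SGI 15 takes the target core out of its wfi loop in _soc_core_entr_off */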

	/* load '0' on success */
	mov x0, xzr

	mov x30, x5
	ret
endfunc _soc_core_restart

/*
 * This function determines if a core is disabled via COREDISR
 * in: w0 = core_mask_lsb
 * out: w0 = 0, core not disabled
 * w0 != 0, core disabled
 * uses x0, x1, x2
 */
func _soc_ck_disabled
	/* get base addr of dcfg block */
	mov x1, #NXP_DCFG_ADDR

	/* read COREDISR */
	ldr w1, [x1, #DCFG_COREDISR_OFFSET]
	rev w2, w1

	/* test core bit */
	and w0, w2, w0
	ret
endfunc _soc_ck_disabled

/*
 * This function resets the system via SoC-specific methods
 * in: none
 * out: none
 * uses x0, x1, x2, x3
 */
func _soc_sys_reset
	ldr x2, =NXP_DCFG_ADDR

	/* make sure the mask is cleared in the reset request mask register */
	mov w1, wzr
	str w1, [x2, #DCFG_RSTRQMR1_OFFSET]

	/* set the reset request */
	ldr w1, =RSTCR_RESET_REQ
	ldr x3, =DCFG_RSTCR_OFFSET
	rev w0, w1
	str w0, [x2, x3]

	/*
	 * just in case this address range is mapped as cacheable,
	 * flush the write out of the dcaches
	 */
	add x3, x2, x3
	dc cvac, x3
	dsb st
	isb

	/* Note: this function does not return */
1:
	wfi
	b 1b
endfunc _soc_sys_reset

/*
 * Part of SYSTEM_OFF
 * this function turns off the SoC clocks
 * Note: this function is not intended to return, and the only allowable
 * recovery is POR
 * in: none
 * out: none
 * uses x0, x1, x2, x3, x4, x5, x6, x7, x8, x9
 */
func _soc_sys_off

	/* mask interrupts at the core */
	mrs x1, DAIF
	mov x0, #DAIF_SET_MASK
	orr x0, x1, x0
	msr DAIF, x0

	/* disable icache, dcache, mmu @ EL1 */
	mov x1, #SCTLR_I_C_M_MASK
	mrs x0, sctlr_el1
	bic x0, x0, x1
	msr sctlr_el1, x0

	/* disable dcache for EL3 */
	mrs x1, SCTLR_EL3
	bic x1, x1, #SCTLR_C_MASK
	/* make sure icache is enabled */
	orr x1, x1, #SCTLR_I_MASK
	msr SCTLR_EL3, x1
	isb

	/* Enable dynamic retention ctrl (CPUECTLR[2:0]) and SMP (CPUECTLR[6]) */
	mrs x0, CORTEX_A72_ECTLR_EL1
	orr x0, x0, #CPUECTLR_TIMER_8TICKS
	orr x0, x0, #CPUECTLR_SMPEN_EN
	msr CORTEX_A72_ECTLR_EL1, x0

	/* set WFIL2EN in SCFG_CLUSTERPMCR */
	ldr x0, =SCFG_COREPMCR_OFFSET
	ldr x1, =COREPMCR_WFIL2
	bl write_reg_scfg

	/* request LPM20 */
	mov x0, #RCPM_POWMGTCSR_OFFSET
	bl read_reg_rcpm
	orr x1, x0, #RCPM_POWMGTCSR_LPM20_REQ
	mov x0, #RCPM_POWMGTCSR_OFFSET
	bl write_reg_rcpm

	dsb sy
	isb
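	/* only a power-on reset can recover from this point (see note above) */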
1:
	wfi
	b 1b
endfunc _soc_sys_off

/*
 * Write a register in the RCPM block
 * in: x0 = offset
 * in: w1 = value to write
 * uses x0, x1, x2, x3
 */
func write_reg_rcpm
	ldr x2, =NXP_RCPM_ADDR
	/* swap for BE */
	rev w3, w1
	str w3, [x2, x0]
	ret
endfunc write_reg_rcpm

/*
 * Read a register in the RCPM block
 * in: x0 = offset
 * out: w0 = value read
 * uses x0, x1, x2
 */
func read_reg_rcpm
	ldr x2, =NXP_RCPM_ADDR
	ldr w1, [x2, x0]
	/* swap for BE */
	rev w0, w1
	ret
endfunc read_reg_rcpm

/*
 * Write a register in the SCFG block
 * in: x0 = offset
 * in: w1 = value to write
 * uses x0, x1, x2, x3
 */
func write_reg_scfg
	mov x2, #NXP_SCFG_ADDR
	/* swap for BE */
	rev w3, w1
	str w3, [x2, x0]
	ret
endfunc write_reg_scfg

/*
 * Read a register in the SCFG block
 * in: x0 = offset
 * out: w0 = value read
 * uses x0, x1, x2
 */
func read_reg_scfg
	mov x2, #NXP_SCFG_ADDR
	ldr w1, [x2, x0]
	/* swap for BE */
	rev w0, w1
	ret
endfunc read_reg_scfg

/*
 * Part of CPU_OFF
 * this function programs SoC & GIC registers in preparation for shutting down
 * the core
 * in: x0 = core mask lsb
 * out: none
 * uses x0, x1, x2, x3, x4, x5, x6, x7
 */
func _soc_core_prep_off
	mov x7, x30
	mov x6, x0

	/* Set retention control in CPUECTLR; make sure the SMPEN bit is set */
	mrs x4, CORTEX_A72_ECTLR_EL1
	bic x4, x4, #CPUECTLR_RET_MASK
	orr x4, x4, #CPUECTLR_TIMER_8TICKS
	orr x4, x4, #CPUECTLR_SMPEN_EN
	msr CORTEX_A72_ECTLR_EL1, x4

	/* save timer control current value */
	mov x5, #NXP_TIMER_ADDR
	ldr w4, [x5, #SYS_COUNTER_CNTCR_OFFSET]
	mov w2, w4
	mov x0, x6
	mov x1, #TIMER_CNTRL_DATA
	bl _setCoreData

	/* enable the timer */
	orr w4, w4, #CNTCR_EN_MASK
	str w4, [x5, #SYS_COUNTER_CNTCR_OFFSET]
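	/* the saved CNTCR value is restored in _soc_core_exit_off */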

	bl _getGICC_BaseAddr
	mov x5, x0

	/* disable signaling of ints */
	ldr w3, [x5, #GICC_CTLR_OFFSET]
	bic w3, w3, #GICC_CTLR_EN_GRP0
	bic w3, w3, #GICC_CTLR_EN_GRP1
	str w3, [x5, #GICC_CTLR_OFFSET]
	dsb sy
	isb


	/*
	 * set retention control in SCFG_RETREQCR
	 * Note: this register is msb-0
	 */
	ldr x4, =SCFG_RETREQCR_OFFSET
	mov x0, x4
	bl read_reg_scfg
	rbit w1, w6
	orr w1, w0, w1
	mov x0, x4
	bl write_reg_scfg

	/* set the priority filter */
	ldr w2, [x5, #GICC_PMR_OFFSET]
	orr w2, w2, #GICC_PMR_FILTER
	str w2, [x5, #GICC_PMR_OFFSET]

	/* setup GICC_CTLR */
	bic w3, w3, #GICC_CTLR_ACKCTL_MASK
	orr w3, w3, #GICC_CTLR_FIQ_EN_MASK
	orr w3, w3, #GICC_CTLR_EOImodeS_MASK
	orr w3, w3, #GICC_CTLR_CBPR_MASK
	str w3, [x5, #GICC_CTLR_OFFSET]
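	/*
	 * With EOImodeS set, the EOIR write in _soc_core_exit_off only drops
	 * priority; the interrupt is deactivated by the GICC_DIR write there.
	 */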

	/* setup the banked-per-core GICD registers */
	bl _getGICD_BaseAddr
	mov x5, x0

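	/*
	 * SGI 15 (group 0, highest priority) is the wake-up interrupt used by
	 * _soc_core_restart to bring this core back online.
	 */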
	/* define SGI15 as Grp0 */
	ldr w2, [x5, #GICD_IGROUPR0_OFFSET]
	bic w2, w2, #GICD_IGROUP0_SGI15
	str w2, [x5, #GICD_IGROUPR0_OFFSET]

	/* set priority of SGI 15 to highest... */
	ldr w2, [x5, #GICD_IPRIORITYR3_OFFSET]
	bic w2, w2, #GICD_IPRIORITY_SGI15_MASK
	str w2, [x5, #GICD_IPRIORITYR3_OFFSET]

	/* enable SGI 15 */
	ldr w2, [x5, #GICD_ISENABLER0_OFFSET]
	orr w2, w2, #GICD_ISENABLE0_SGI15
	str w2, [x5, #GICD_ISENABLER0_OFFSET]

	/* enable the cpu interface */
	bl _getGICC_BaseAddr
	mov x2, x0
	orr w3, w3, #GICC_CTLR_EN_GRP0
	str w3, [x2, #GICC_CTLR_OFFSET]


	/* clear any pending SGIs */
	ldr x2, =GICD_CPENDSGIR_CLR_MASK
	add x0, x5, #GICD_CPENDSGIR3_OFFSET
	str w2, [x0]

	/*
	 * Set the PC_PH20_REQ bit in RCPM_PCPH20SETR
	 * this is an lsb-0 register
	 */
	mov x1, x6
	mov x0, #RCPM_PCPH20SETR_OFFSET
	bl write_reg_rcpm

	dsb sy
	isb
	mov x30, x7
	ret
endfunc _soc_core_prep_off

/*
 * Part of CPU_OFF
 * this function performs the final steps to shut down the core
 * in: x0 = core mask lsb
 * out: none
 * uses x0, x1, x2, x3, x4, x5
 */
func _soc_core_entr_off
	mov x5, x30
	mov x4, x0

	bl _getGICD_BaseAddr
	mov x3, x0

3:
	/* enter low-power state by executing wfi */
	wfi

	/* see if we got hit by SGI 15 */
	add x0, x3, #GICD_SPENDSGIR3_OFFSET
	ldr w2, [x0]
	and w2, w2, #GICD_SPENDSGIR3_SGI15_MASK
	cbz w2, 4f

	/* clear the pending SGI */
	ldr x2, =GICD_CPENDSGIR_CLR_MASK
	add x0, x3, #GICD_CPENDSGIR3_OFFSET
	str w2, [x0]
4:
	/* check if core has been turned on */
	mov x0, x4
	bl _getCoreState

	cmp x0, #CORE_WAKEUP
	b.ne 3b

	/* if we get here, then we have exited the wfi */
	dsb sy
	isb
	mov x30, x5
	ret
endfunc _soc_core_entr_off

/*
 * Part of CPU_OFF
 * this function starts the process of bringing a core back up
 * in: x0 = core mask lsb
 * out: none
 * uses x0, x1, x2, x3, x4, x5, x6
 */
func _soc_core_exit_off
	mov x6, x30
	mov x5, x0

	/*
	 * Clear ph20 request in RCPM_PCPH20CLRR - no need
	 * to do that here, it has been done in _soc_core_restart
	 */
	bl _getGICC_BaseAddr
	mov x1, x0

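	/*
	 * acknowledge and deactivate the wake-up SGI:
	 * read IAR, then write EOIR and DIR
	 */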
	/* read GICC_IAR */
	ldr w0, [x1, #GICC_IAR_OFFSET]

	/* write GICC_EOIR - signal end-of-interrupt */
	str w0, [x1, #GICC_EOIR_OFFSET]

	/* write GICC_DIR - deactivate the interrupt */
	str w0, [x1, #GICC_DIR_OFFSET]

	/* disable signaling of grp0 ints */
	ldr w3, [x1, #GICC_CTLR_OFFSET]
	bic w3, w3, #GICC_CTLR_EN_GRP0
	str w3, [x1, #GICC_CTLR_OFFSET]

	/*
	 * Unset retention request in SCFG_RETREQCR
	 * Note: this register is msb-0
	 */
	ldr x4, =SCFG_RETREQCR_OFFSET
	mov x0, x4
	bl read_reg_scfg
	rbit w1, w5
	bic w1, w0, w1
	mov x0, x4
	bl write_reg_scfg

	/* restore timer ctrl */
	mov x0, x5
	mov x1, #TIMER_CNTRL_DATA
	bl _getCoreData
	/* w0 = timer ctrl saved value */
	mov x2, #NXP_TIMER_ADDR
	str w0, [x2, #SYS_COUNTER_CNTCR_OFFSET]

	dsb sy
	isb
	mov x30, x6
	ret
endfunc _soc_core_exit_off

/*
 * Function loads a 64-bit execution address of the core in the soc registers
 * BOOTLOCPTRL/H
 * in: x0, 64-bit address to write to BOOTLOCPTRL/H
 * uses x0, x1, x2, x3
 */
func _soc_set_start_addr
	/* get the 64-bit base address of the scfg block */
	ldr x2, =NXP_SCFG_ADDR

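	/* BOOTLOCPTRL/H are big-endian; each 32-bit half is byte-swapped with rev */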
	/* write the 32-bit BOOTLOCPTRL register */
	mov x1, x0
	rev w3, w1
	str w3, [x2, #SCFG_BOOTLOCPTRL_OFFSET]

	/* write the 32-bit BOOTLOCPTRH register */
	lsr x1, x0, #32
	rev w3, w1
	str w3, [x2, #SCFG_BOOTLOCPTRH_OFFSET]
	ret
endfunc _soc_set_start_addr

/*
 * This function returns the base address of the gic distributor
 * in: none
 * out: x0 = base address of gic distributor
 * uses x0
 */
func _getGICD_BaseAddr
#if (TEST_BL31)
	/* defect in simulator - gic base addresses are on 4Kb boundary */
	ldr x0, =NXP_GICD_4K_ADDR
#else
	ldr x0, =NXP_GICD_64K_ADDR
#endif
	ret
endfunc _getGICD_BaseAddr

/*
 * This function returns the base address of the gic cpu interface
 * in: none
 * out: x0 = base address of gic cpu interface
 * uses x0
 */
func _getGICC_BaseAddr
#if (TEST_BL31)
	/* defect in simulator - gic base addresses are on 4Kb boundary */
	ldr x0, =NXP_GICC_4K_ADDR
#else
	ldr x0, =NXP_GICC_64K_ADDR
#endif
	ret
endfunc _getGICC_BaseAddr

/*
 * Part of CPU_SUSPEND
 * this function puts the calling core into standby state
 * in: x0 = core mask lsb
 * out: none
 * uses x0
 */
func _soc_core_entr_stdby
	dsb sy
	isb
	wfi

	ret
endfunc _soc_core_entr_stdby


/*
 * Part of CPU_SUSPEND
 * this function performs SoC-specific programming prior to standby
 * in: x0 = core mask lsb
 * out: none
 * uses x0, x1
 */
func _soc_core_prep_stdby
	/* clear CORTEX_A72_ECTLR_EL1[2:0] */
	mrs x1, CORTEX_A72_ECTLR_EL1
	bic x1, x1, #CPUECTLR_TIMER_MASK
	msr CORTEX_A72_ECTLR_EL1, x1

	ret
endfunc _soc_core_prep_stdby

/*
 * Part of CPU_SUSPEND
 * this function performs any SoC-specific cleanup after standby state
 * in: x0 = core mask lsb
 * out: none
 * uses none
 */
func _soc_core_exit_stdby
	ret
endfunc _soc_core_exit_stdby

/*
 * Part of CPU_SUSPEND
 * this function performs SoC-specific programming prior to power-down
 * in: x0 = core mask lsb
 * out: none
 * uses x0, x1, x2, x3, x4, x5
 */
func _soc_core_prep_pwrdn
	mov x5, x30
	mov x4, x0

	/* enable CPU retention + set smp */
	mrs x1, CORTEX_A72_ECTLR_EL1
	orr x1, x1, #0x1
	orr x1, x1, #CPUECTLR_SMPEN_MASK
	msr CORTEX_A72_ECTLR_EL1, x1

	/*
	 * set the retention request in SCFG_RETREQCR
	 * this is an msb-0 register
	 */
	ldr x3, =SCFG_RETREQCR_OFFSET
	mov x0, x3
	bl read_reg_scfg
	rbit w1, w4
	orr w1, w0, w1
	mov x0, x3
	bl write_reg_scfg

	/*
	 * Set the PC_PH20_REQ bit in RCPM_PCPH20SETR
	 * this is an lsb-0 register
	 */
	mov x1, x4
	mov x0, #RCPM_PCPH20SETR_OFFSET
	bl write_reg_rcpm

	mov x30, x5
	ret
endfunc _soc_core_prep_pwrdn

/*
 * Part of CPU_SUSPEND
 * this function puts the calling core into a power-down state
 * in: x0 = core mask lsb
 * out: none
 * uses x0
 */
func _soc_core_entr_pwrdn
	dsb sy
	isb
	wfi

	ret
endfunc _soc_core_entr_pwrdn

/*
 * Part of CPU_SUSPEND
 * this function cleans up after a core exits power-down
 * in: x0 = core mask lsb
 * out: none
 * uses x0, x1, x2, x3, x4, x5
 */
func _soc_core_exit_pwrdn
	mov x5, x30
	mov x4, x0

	/*
	 * Set the PC_PH20_REQ bit in RCPM_PCPH20CLRR
	 * this is an lsb-0 register
	 */
	mov x1, x4
	mov x0, #RCPM_PCPH20CLRR_OFFSET
	bl write_reg_rcpm

	/*
	 * Unset the retention request in SCFG_RETREQCR
	 * this is an msb-0 register
	 */
	ldr x3, =SCFG_RETREQCR_OFFSET
	mov x0, x3
	bl read_reg_scfg
	rbit w1, w4
	bic w1, w0, w1
	mov x0, x3
	bl write_reg_scfg

	mov x30, x5
	ret
endfunc _soc_core_exit_pwrdn

/*
 * Part of CPU_SUSPEND
 * this function performs SoC-specific programming prior to standby
 * in: x0 = core mask lsb
 * out: none
 * uses none
 */
func _soc_clstr_prep_stdby
	/* clear CORTEX_A72_ECTLR_EL1[2:0] */
	mrs x1, CORTEX_A72_ECTLR_EL1
	bic x1, x1, #CPUECTLR_TIMER_MASK
	msr CORTEX_A72_ECTLR_EL1, x1

	ret
endfunc _soc_clstr_prep_stdby

/*
 * Part of CPU_SUSPEND
 * this function performs any SoC-specific cleanup after standby state
 * in: x0 = core mask lsb
 * out: none
 * uses none
 */
func _soc_clstr_exit_stdby
	ret
endfunc _soc_clstr_exit_stdby

/*
 * Part of CPU_SUSPEND
 * this function performs SoC-specific programming prior to power-down
 * in: x0 = core mask lsb
 * out: none
 * uses x0, x1, x2, x3, x4, x5
 */
func _soc_clstr_prep_pwrdn
	mov x5, x30
	mov x4, x0

	/* enable CPU retention + set smp */
	mrs x1, CORTEX_A72_ECTLR_EL1
	orr x1, x1, #0x1
	orr x1, x1, #CPUECTLR_SMPEN_MASK
	msr CORTEX_A72_ECTLR_EL1, x1

	/*
	 * Set the retention request in SCFG_RETREQCR
	 * this is an msb-0 register.
	 */
	ldr x3, =SCFG_RETREQCR_OFFSET
	mov x0, x3
	bl read_reg_scfg
	rbit w1, w4
	orr w1, w0, w1
	mov x0, x3
	bl write_reg_scfg

	/*
	 * Set the PC_PH20_REQ bit in RCPM_PCPH20SETR
	 * this is an lsb-0 register.
	 */
	mov x1, x4
	mov x0, #RCPM_PCPH20SETR_OFFSET
	bl write_reg_rcpm

	mov x30, x5
	ret
endfunc _soc_clstr_prep_pwrdn

/*
 * Part of CPU_SUSPEND
 * this function cleans up after a core exits power-down
 * in: x0 = core mask lsb
 * out: none
 * uses x0, x1, x2, x3, x4, x5
 */
func _soc_clstr_exit_pwrdn
	mov x5, x30
	mov x4, x0

	/*
	 * Set the PC_PH20_REQ bit in RCPM_PCPH20CLRR
	 * this is an lsb-0 register.
	 */
	mov x1, x4
	mov x0, #RCPM_PCPH20CLRR_OFFSET
	bl write_reg_rcpm

	/*
	 * Unset the retention request in SCFG_RETREQCR
	 * this is an msb-0 register.
	 */
	ldr x3, =SCFG_RETREQCR_OFFSET
	mov x0, x3
	bl read_reg_scfg
	rbit w1, w4
	bic w1, w0, w1
	mov x0, x3
	bl write_reg_scfg

	mov x30, x5
	ret
endfunc _soc_clstr_exit_pwrdn

/*
 * Part of CPU_SUSPEND
 * this function performs SoC-specific programming prior to standby
 * in: x0 = core mask lsb
 * out: none
 * uses none
 */
func _soc_sys_prep_stdby
	/* clear CORTEX_A72_ECTLR_EL1[2:0] */
	mrs x1, CORTEX_A72_ECTLR_EL1
	bic x1, x1, #CPUECTLR_TIMER_MASK
	msr CORTEX_A72_ECTLR_EL1, x1

	ret
endfunc _soc_sys_prep_stdby

/* Part of CPU_SUSPEND
 * this function performs any SoC-specific cleanup after standby state
 * in: x0 = core mask lsb
 * out: none
 * uses none
 */
func _soc_sys_exit_stdby
	ret
endfunc _soc_sys_exit_stdby

/*
 * Part of CPU_SUSPEND
 * this function performs SoC-specific programming prior to
 * suspend-to-power-down
 * in: x0 = core mask lsb
 * out: none
 * uses x0, x1, x2, x3, x4
 */
func _soc_sys_prep_pwrdn
	mov x4, x30

	/* Enable dynamic retention control (CPUECTLR[2:0]) and SMP (CPUECTLR[6]) */
	mrs x0, CORTEX_A72_ECTLR_EL1
	bic x0, x0, #CPUECTLR_TIMER_MASK
	orr x0, x0, #CPUECTLR_TIMER_8TICKS
	orr x0, x0, #CPUECTLR_SMPEN_EN
	msr CORTEX_A72_ECTLR_EL1, x0

	/* Set WFIL2EN in SCFG_CLUSTERPMCR */
	ldr x0, =SCFG_COREPMCR_OFFSET
	ldr x1, =COREPMCR_WFIL2
	bl write_reg_scfg

	isb
	mov x30, x4
	ret
endfunc _soc_sys_prep_pwrdn

/*
 * Part of CPU_SUSPEND
 * this function puts the calling core, and potentially the soc, into a
 * low-power state
 * in: x0 = core mask lsb
 * out: x0 = 0, success
 * x0 < 0, failure
 * uses x0, x1, x2, x3, x4
 */
func _soc_sys_pwrdn_wfi
	mov x4, x30

	/* request LPM20 */
	mov x0, #RCPM_POWMGTCSR_OFFSET
	bl read_reg_rcpm
	orr x1, x0, #RCPM_POWMGTCSR_LPM20_REQ
	mov x0, #RCPM_POWMGTCSR_OFFSET
	bl write_reg_rcpm

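	/*
	 * with LPM20 requested above, the wfi below enters the low-power state;
	 * execution resumes here on wake-up
	 */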
	dsb sy
	isb
	wfi

	mov x30, x4
	ret
endfunc _soc_sys_pwrdn_wfi

/*
 * Part of CPU_SUSPEND
 * this function performs any SoC-specific cleanup after power-down
 * in: x0 = core mask lsb
 * out: none
 * uses x0, x1
 */
func _soc_sys_exit_pwrdn
	/* clear WFIL2_EN in SCFG_COREPMCR */
	mov x1, #NXP_SCFG_ADDR
	str wzr, [x1, #SCFG_COREPMCR_OFFSET]

	ret
endfunc _soc_sys_exit_pwrdn