/*
 * Copyright (c) 2016, ARM Limited and Contributors. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *
 * Redistributions of source code must retain the above copyright notice, this
 * list of conditions and the following disclaimer.
 *
 * Redistributions in binary form must reproduce the above copyright notice,
 * this list of conditions and the following disclaimer in the documentation
 * and/or other materials provided with the distribution.
 *
 * Neither the name of ARM nor the names of its contributors may be used
 * to endorse or promote products derived from this software without specific
 * prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
 * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */

#include <arch.h>
#include <asm_macros.S>
#include <platform_def.h>
#include <pmu_regs.h>

	.globl	clst_warmboot_data

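	/*
	 * Helper macro that places a function in the .sram.text section so
	 * that it can be executed from SRAM.
	 */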
	.macro sram_func _name
	.cfi_sections .debug_frame
	.section .sram.text, "ax"
	.type \_name, %function
	.func \_name
	.cfi_startproc
	\_name:
	.endm

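/*
 * Register/bit definitions used below: CRU_CLKSEL_CON6 selects the PLL
 * source for clk_ddrc, the DDRCTL*_C_SYSREQ_CFG bits in PMU_SFT_CON raise
 * the idle request towards the two DDR controllers, and the
 * DDRC*_SREF_DONE_EXT bits in PMU_DDR_SREF_ST report their self-refresh
 * status.
 */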
#define CRU_CLKSEL_CON6		0x118

#define DDRCTL0_C_SYSREQ_CFG	0x0100
#define DDRCTL1_C_SYSREQ_CFG	0x1000

#define DDRC0_SREF_DONE_EXT	0x01
#define DDRC1_SREF_DONE_EXT	0x04

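/*
 * Rockchip CRU registers use the upper 16 bits as a write-enable mask for
 * the lower 16 bits, so PLL_NORMAL_MODE unlocks the PLL mode field
 * (bits [9:8]) and sets it to normal mode in a single 32-bit write.
 */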
#define PLL_MODE_SHIFT	(0x8)
#define PLL_NORMAL_MODE	((0x3 << (PLL_MODE_SHIFT + 16)) | \
			 (0x1 << PLL_MODE_SHIFT))
#define MPIDR_CLST_L_BITS	0x0
	/*
	 * For some SoCs we need to configure a few registers here to speed
	 * up the warm boot path. If the cluster (SCU) was suspended, the
	 * related clocks must first be switched from slow (24M) mode back
	 * to normal mode.
	 * X0: MPIDR_EL1 & MPIDR_CLUSTER_MASK
	 */
.macro func_rockchip_clst_warmboot
	/*
	 * x0 holds MPIDR_EL1 & MPIDR_CLUSTER_MASK (cluster ID in bits
	 * [15:8]), so x0 >> 6 gives the byte offset of this cluster's
	 * word in clst_warmboot_data. Fetch the saved flag and clear it.
	 */
	adr	x4, clst_warmboot_data
	lsr	x5, x0, #6
	ldr	w3, [x4, x5]
	str	wzr, [x4, x5]
	/* only a cluster that entered retention needs its PLL restored */
	cmp	w3, #PMU_CLST_RET
	b.ne	clst_warmboot_end
	ldr	w6, =(PLL_NORMAL_MODE)
	/*
	 * core_l offset is CRU_BASE + 0xc,
	 * core_b offset is CRU_BASE + 0x2c
	 */
	ldr	x7, =(CRU_BASE + 0xc)
	lsr	x2, x0, #3
	str	w6, [x7, x2]
clst_warmboot_end:
.endm

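/*
 * One 32-bit warm-boot flag per cluster. The platform PMU code is expected
 * to record the cluster's low-power state (e.g. PMU_CLST_RET) here before
 * power down; func_rockchip_clst_warmboot consumes and clears it on the
 * warm boot path.
 */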
.macro rockchip_clst_warmboot_data
clst_warmboot_data:
	.rept	PLATFORM_CLUSTER_COUNT
	.word	0
	.endr
.endm

	/* -----------------------------------------------
	 * void sram_func_set_ddrctl_pll(uint32_t pll_src)
	 * Function to switch the PLL source for ddrctrl
	 * In: x0 - PLL to use as the clk_ddrc clock source
	 * Out: None
	 * Clobber list: x0 - x3, x5, x8 - x10
	 * -----------------------------------------------
	 */
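	/*
	 * Note: this function is placed in .sram.text (via sram_func) so it
	 * keeps executing while both DDR channels are in self-refresh during
	 * the clock-source switch.
	 */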

	.globl	sram_func_set_ddrctl_pll

sram_func sram_func_set_ddrctl_pll
	/* back up the parameter (pll_src) */
	mov	x8, x0

	/* disable the MMU at EL3 */
	mrs	x9, sctlr_el3
	bic	x10, x9, #(SCTLR_M_BIT)
	msr	sctlr_el3, x10
	isb
	dsb	sy

	/* enable ddrctl0_1 idle request */
	mov	x5, PMU_BASE
	ldr	w0, [x5, #PMU_SFT_CON]
	orr	w0, w0, #DDRCTL0_C_SYSREQ_CFG
	orr	w0, w0, #DDRCTL1_C_SYSREQ_CFG
	str	w0, [x5, #PMU_SFT_CON]

check_ddrc0_1_sref_enter:
	ldr	w1, [x5, #PMU_DDR_SREF_ST]
	and	w2, w1, #DDRC0_SREF_DONE_EXT
	and	w3, w1, #DDRC1_SREF_DONE_EXT
	orr	w2, w2, w3
	cmp	w2, #(DDRC0_SREF_DONE_EXT | DDRC1_SREF_DONE_EXT)
	b.eq	check_ddrc0_1_sref_enter

	/*
	 * select a PLL source for the DDR controller:
	 * x0 = 0: ALPLL
	 * x0 = 1: ABPLL
	 * x0 = 2: DPLL
	 * x0 = 3: GPLL
	 */
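	/*
	 * The selector goes into bits [5:4] of CRU_CLKSEL_CON6 (w8 << 4);
	 * 0x00300000 sets the corresponding write-enable bits in the upper
	 * halfword so only that field is modified.
	 */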
	mov	x5, CRU_BASE
	lsl	w0, w8, #4
	orr	w0, w0, #0x00300000
	str	w0, [x5, #CRU_CLKSEL_CON6]

	/* disable ddrctl0_1 idle request */
	mov	x5, PMU_BASE
	ldr	w0, [x5, #PMU_SFT_CON]
	bic	w0, w0, #DDRCTL0_C_SYSREQ_CFG
	bic	w0, w0, #DDRCTL1_C_SYSREQ_CFG
	str	w0, [x5, #PMU_SFT_CON]

check_ddrc0_1_sref_exit:
	ldr	w1, [x5, #PMU_DDR_SREF_ST]
	and	w2, w1, #DDRC0_SREF_DONE_EXT
	and	w3, w1, #DDRC1_SREF_DONE_EXT
	orr	w2, w2, w3
	cmp	w2, #0x0
	b.eq	check_ddrc0_1_sref_exit

	/* re-enable the MMU at EL3 by restoring the saved SCTLR_EL3 */
	msr	sctlr_el3, x9
	isb
	dsb	sy

	ret
endfunc sram_func_set_ddrctl_pll