blob: fd7c6dd1590bd5d0d614a57afce056f9e44be3f4 [file] [log] [blame]
Soby Mathewd29f67b2016-05-05 12:31:57 +01001/*
2 * Copyright (c) 2016, ARM Limited and Contributors. All rights reserved.
3 *
4 * Redistribution and use in source and binary forms, with or without
5 * modification, are permitted provided that the following conditions are met:
6 *
7 * Redistributions of source code must retain the above copyright notice, this
8 * list of conditions and the following disclaimer.
9 *
10 * Redistributions in binary form must reproduce the above copyright notice,
11 * this list of conditions and the following disclaimer in the documentation
12 * and/or other materials provided with the distribution.
13 *
14 * Neither the name of ARM nor the names of its contributors may be used
15 * to endorse or promote products derived from this software without specific
16 * prior written permission.
17 *
18 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
19 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
20 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
21 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
22 * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
23 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
24 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
25 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
26 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
27 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
28 * POSSIBILITY OF SUCH DAMAGE.
29 */
30
31#include <arch.h>
32#include <asm_macros.S>
33#include <assert_macros.S>
34
	/* Symbols exported to the rest of the firmware by this file */
	.globl	smc
	.globl	zeromem
	.globl	disable_mmu_icache_secure
	.globl	disable_mmu_secure
39
/* -----------------------------------------------------------------------
 * Issue an SMC from AArch32 Secure state.
 *
 * In:	r0-r3 already hold SMC arguments 0-3 (per the SMC calling
 *	convention); the caller has placed arguments 4-6 in the three
 *	words at the top of its stack.
 * Clobbers: r4-r6
 * -----------------------------------------------------------------------
 */
func smc
	/*
	 * For AArch32 only r0-r3 will be in the registers;
	 * rest r4-r6 will be pushed on to the stack. So here, we'll
	 * have to load them from the stack to registers r4-r6 explicitly.
	 * Clobbers: r4-r6
	 */
	ldm	sp, {r4, r5, r6}
	smc	#0
endfunc smc
Soby Mathewd29f67b2016-05-05 12:31:57 +010050
/* -----------------------------------------------------------------------
 * void zeromem(void *mem, unsigned int length);
 *
 * Zero-fill the 'length' bytes starting at 'mem', one word at a time.
 * Both the address and the length must be 4-byte aligned; when assembly
 * assertions are compiled in, a misaligned argument triggers an assert.
 * Clobbers: r0-r2, flags
 * -----------------------------------------------------------------------
 */
func zeromem
#if ASM_ASSERTION
	/* Word-aligned base address and length are part of the contract */
	tst	r0, #0x3
	ASM_ASSERT(eq)
	tst	r1, #0x3
	ASM_ASSERT(eq)
#endif
	add	r2, r0, r1		/* r2 = first address past the region */
	mov	r1, #0			/* r1 = zero word to store */
	b	z_check			/* handles a zero-length region too */
z_fill:
	str	r1, [r0], #4		/* *mem++ = 0 */
z_check:
	cmp	r0, r2			/* stop once the cursor hits the end */
	bne	z_fill
	bx	lr
endfunc zeromem
Yatharth Kocharf528faf2016-06-28 16:58:26 +010075
76/* ---------------------------------------------------------------------------
77 * Disable the MMU in Secure State
78 * ---------------------------------------------------------------------------
79 */
80
/*
 * void disable_mmu_secure(void);
 *
 * Turn off the MMU and data cache for the current (Secure) translation
 * regime by clearing SCTLR.M and SCTLR.C.
 * Clobbers: r0, r1
 */
func disable_mmu_secure
	/* r1 = mask of SCTLR bits to clear (MMU enable, data cache enable) */
	mov	r1, #(SCTLR_M_BIT | SCTLR_C_BIT)
do_disable_mmu:
	/*
	 * Shared tail, also entered from disable_mmu_icache_secure:
	 * expects the clear-mask in r1.
	 */
	ldcopr	r0, SCTLR		/* r0 = current SCTLR value */
	bic	r0, r0, r1		/* clear the requested enable bits */
	stcopr	r0, SCTLR
	isb				// ensure MMU is off
	dsb	sy			/* drain all pending memory accesses */
	bx	lr
endfunc disable_mmu_secure
91
92
/*
 * void disable_mmu_icache_secure(void);
 *
 * As disable_mmu_secure, but additionally clears SCTLR.I to disable the
 * instruction cache. Tail-branches into the shared do_disable_mmu code.
 * Clobbers: r0, r1
 */
func disable_mmu_icache_secure
	/*
	 * M | C | I is not encodable as a mov immediate, so load the
	 * mask from the literal pool instead.
	 */
	ldr	r1, =(SCTLR_M_BIT | SCTLR_C_BIT | SCTLR_I_BIT)
	b	do_disable_mmu
endfunc disable_mmu_icache_secure