blob: bdc9ea0f7d9eeba442f3a46eb657b4f4737160f9 [file] [log] [blame]
/*
 * Copyright (c) 2013-2017, ARM Limited and Contributors. All rights reserved.
 *
4 * Redistribution and use in source and binary forms, with or without
5 * modification, are permitted provided that the following conditions are met:
6 *
7 * Redistributions of source code must retain the above copyright notice, this
8 * list of conditions and the following disclaimer.
9 *
10 * Redistributions in binary form must reproduce the above copyright notice,
11 * this list of conditions and the following disclaimer in the documentation
12 * and/or other materials provided with the distribution.
13 *
14 * Neither the name of ARM nor the names of its contributors may be used
15 * to endorse or promote products derived from this software without specific
16 * prior written permission.
17 *
18 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
19 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
20 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
21 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
22 * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
23 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
24 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
25 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
26 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
27 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
28 * POSSIBILITY OF SUCH DAMAGE.
29 */
30
31#include <asm_macros.S>
32
33 .globl spin_lock
34 .globl spin_unlock
35
Jeenu Viswambharanfca76802017-01-16 16:52:35 +000036#if (ARM_ARCH_MAJOR > 8) || ((ARM_ARCH_MAJOR == 8) && (ARM_ARCH_MINOR >= 1))
Soby Mathew7b5c9b32016-08-08 12:42:53 +010037
Jeenu Viswambharanfca76802017-01-16 16:52:35 +000038/*
39 * When compiled for ARMv8.1 or later, choose spin locks based on Compare and
40 * Swap instruction.
41 */
42# define USE_CAS 1
43
44/*
45 * Lock contenders using CAS, upon failing to acquire the lock, wait with the
46 * monitor in open state. Therefore, a normal store upon unlocking won't
47 * generate an SEV. Use explicit SEV instruction with CAS unlock.
48 */
49# define COND_SEV() sev
50
51#else
52
53# define USE_CAS 0
54
55/*
56 * Lock contenders using exclusive pairs, upon failing to acquire the lock, wait
57 * with the monitor in exclusive state. A normal store upon unlocking will
58 * implicitly generate an envent; so, no explicit SEV with unlock is required.
59 */
60# define COND_SEV()
61
62#endif
63
64#if USE_CAS
65
66 .arch armv8.1-a
67
68/*
69 * Acquire lock using Compare and Swap instruction.
70 *
71 * Compare for 0 with acquire semantics, and swap 1. Wait until CAS returns
72 * 0.
73 *
74 * void spin_lock(spinlock_t *lock);
75 */
Soby Mathew7b5c9b32016-08-08 12:42:53 +010076func spin_lock
77 mov w2, #1
78 sevl
Jeenu Viswambharanfca76802017-01-16 16:52:35 +0000791:
80 wfe
81 mov w1, wzr
82 casa w1, w2, [x0]
83 cbnz w1, 1b
84 ret
85endfunc spin_lock
86
87 .arch armv8-a
88
89#else /* !USE_CAS */
90
91/*
92 * Acquire lock using load-/store-exclusive instruction pair.
93 *
94 * void spin_lock(spinlock_t *lock);
95 */
96func spin_lock
97 mov w2, #1
98 sevl
Soby Mathew7b5c9b32016-08-08 12:42:53 +010099l1: wfe
100l2: ldaxr w1, [x0]
101 cbnz w1, l1
102 stxr w1, w2, [x0]
103 cbnz w1, l2
104 ret
105endfunc spin_lock
106
Jeenu Viswambharanfca76802017-01-16 16:52:35 +0000107#endif /* USE_CAS */
Soby Mathew7b5c9b32016-08-08 12:42:53 +0100108
Jeenu Viswambharanfca76802017-01-16 16:52:35 +0000109/*
110 * Release lock previously acquired by spin_lock.
111 *
112 * Unconditionally write 0, and conditionally generate an event.
113 *
114 * void spin_unlock(spinlock_t *lock);
115 */
Soby Mathew7b5c9b32016-08-08 12:42:53 +0100116func spin_unlock
117 stlr wzr, [x0]
Jeenu Viswambharanfca76802017-01-16 16:52:35 +0000118 COND_SEV()
Soby Mathew7b5c9b32016-08-08 12:42:53 +0100119 ret
120endfunc spin_unlock