/*
 * Copyright (c) 2013-2019, ARM Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */
6
7#include <asm_macros.S>
8
9 .globl spin_lock
10 .globl spin_unlock
11
Jeenu Viswambharan0bc79d92017-08-16 11:44:25 +010012#if ARM_ARCH_AT_LEAST(8, 1)
Soby Mathew7b5c9b32016-08-08 12:42:53 +010013
Jeenu Viswambharanfca76802017-01-16 16:52:35 +000014/*
15 * When compiled for ARMv8.1 or later, choose spin locks based on Compare and
16 * Swap instruction.
17 */
18# define USE_CAS 1
19
20/*
21 * Lock contenders using CAS, upon failing to acquire the lock, wait with the
22 * monitor in open state. Therefore, a normal store upon unlocking won't
23 * generate an SEV. Use explicit SEV instruction with CAS unlock.
24 */
25# define COND_SEV() sev
26
27#else
28
29# define USE_CAS 0
30
31/*
32 * Lock contenders using exclusive pairs, upon failing to acquire the lock, wait
33 * with the monitor in exclusive state. A normal store upon unlocking will
34 * implicitly generate an envent; so, no explicit SEV with unlock is required.
35 */
36# define COND_SEV()
37
38#endif
39
40#if USE_CAS
41
Jeenu Viswambharanfca76802017-01-16 16:52:35 +000042/*
43 * Acquire lock using Compare and Swap instruction.
44 *
45 * Compare for 0 with acquire semantics, and swap 1. Wait until CAS returns
46 * 0.
47 *
48 * void spin_lock(spinlock_t *lock);
49 */
Soby Mathew7b5c9b32016-08-08 12:42:53 +010050func spin_lock
51 mov w2, #1
52 sevl
Jeenu Viswambharanfca76802017-01-16 16:52:35 +0000531:
54 wfe
55 mov w1, wzr
56 casa w1, w2, [x0]
57 cbnz w1, 1b
58 ret
59endfunc spin_lock
60
Jeenu Viswambharanfca76802017-01-16 16:52:35 +000061#else /* !USE_CAS */
62
63/*
64 * Acquire lock using load-/store-exclusive instruction pair.
65 *
66 * void spin_lock(spinlock_t *lock);
67 */
68func spin_lock
69 mov w2, #1
70 sevl
Soby Mathew7b5c9b32016-08-08 12:42:53 +010071l1: wfe
72l2: ldaxr w1, [x0]
73 cbnz w1, l1
74 stxr w1, w2, [x0]
75 cbnz w1, l2
76 ret
77endfunc spin_lock
78
Jeenu Viswambharanfca76802017-01-16 16:52:35 +000079#endif /* USE_CAS */
Soby Mathew7b5c9b32016-08-08 12:42:53 +010080
Jeenu Viswambharanfca76802017-01-16 16:52:35 +000081/*
82 * Release lock previously acquired by spin_lock.
83 *
84 * Unconditionally write 0, and conditionally generate an event.
85 *
86 * void spin_unlock(spinlock_t *lock);
87 */
Soby Mathew7b5c9b32016-08-08 12:42:53 +010088func spin_unlock
89 stlr wzr, [x0]
Jeenu Viswambharanfca76802017-01-16 16:52:35 +000090 COND_SEV()
Soby Mathew7b5c9b32016-08-08 12:42:53 +010091 ret
92endfunc spin_unlock