/*
 * Copyright (c) 2013-2017, ARM Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <asm_macros.S>

	.globl	spin_lock
	.globl	spin_unlock

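/*
 * spinlock_t is not defined in this file; in TF-A it comes from
 * include/lib/spinlock.h. A minimal sketch of the assumed layout, with the
 * lock word 0 when free and 1 when held (the field name is illustrative):
 *
 *	typedef struct spinlock {
 *		volatile uint32_t lock;
 *	} spinlock_t;
 *
 * Both implementations below treat [x0] as that single 32-bit word.
 */
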
#if (ARM_ARCH_MAJOR > 8) || ((ARM_ARCH_MAJOR == 8) && (ARM_ARCH_MINOR >= 1))

/*
 * When compiled for ARMv8.1 or later, choose spin locks based on the Compare
 * and Swap instruction.
 */
# define USE_CAS	1

/*
 * Lock contenders using CAS, upon failing to acquire the lock, wait with the
 * monitor in open state. Therefore, a normal store upon unlocking won't
 * generate an event. Use an explicit SEV instruction with CAS unlock.
 */
# define COND_SEV()	sev

#else

# define USE_CAS	0

/*
 * Lock contenders using exclusive pairs, upon failing to acquire the lock,
 * wait with the monitor in exclusive state. A normal store upon unlocking
 * will implicitly generate an event, so no explicit SEV is required on
 * unlock.
 */
# define COND_SEV()

#endif

#if USE_CAS

	.arch	armv8.1-a

/*
 * Acquire lock using the Compare and Swap instruction.
 *
 * Compare the lock word against 0 with acquire semantics, and swap in 1.
 * Retry until CAS returns 0, i.e. the lock was observed free and taken.
 *
 * void spin_lock(spinlock_t *lock);
 */
func spin_lock
	mov	w2, #1		/* Value to acquire the lock with */
	sevl			/* Arm WFE so the first wait falls through */
1:
	wfe
	mov	w1, wzr		/* Expect the lock word to be 0 (free) */
	casa	w1, w2, [x0]	/* Acquire-CAS: if *lock == 0, *lock = 1 */
	cbnz	w1, 1b		/* Non-zero old value: lock was held; retry */
	ret
endfunc spin_lock
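
/*
 * Illustrative only, not part of this file: a rough C counterpart of the
 * CAS-based spin_lock above, written with GCC/Clang atomic builtins and
 * assuming the single-word spinlock_t sketched earlier. The WFE/SEVL event
 * pacing has no direct C equivalent and is omitted.
 *
 *	void spin_lock_cas_sketch(spinlock_t *l)
 *	{
 *		uint32_t expected;
 *
 *		do {
 *			expected = 0;	// expect the lock to be free
 *		} while (!__atomic_compare_exchange_n(&l->lock, &expected, 1,
 *				false, __ATOMIC_ACQUIRE, __ATOMIC_RELAXED));
 *	}
 */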

	.arch	armv8-a

#else /* !USE_CAS */

/*
 * Acquire lock using load-/store-exclusive instruction pair.
 *
 * void spin_lock(spinlock_t *lock);
 */
func spin_lock
	mov	w2, #1		/* Value to acquire the lock with */
	sevl			/* Arm WFE so the first wait falls through */
l1:	wfe
l2:	ldaxr	w1, [x0]	/* Load-acquire exclusive the lock word */
	cbnz	w1, l1		/* Held by another CPU: wait for an event */
	stxr	w1, w2, [x0]	/* Try to claim it; w1 is 0 on success */
	cbnz	w1, l2		/* Exclusive access lost: retry the load */
	ret
endfunc spin_lock

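/*
 * Illustrative only: at C level, the LDAXR/STXR loop above corresponds to a
 * weak compare-and-exchange, which may fail spuriously (the STXR losing its
 * exclusive reservation) and must then retry, exactly as the branch back to
 * l2 does. Names and builtins as in the earlier sketch:
 *
 *	void spin_lock_llsc_sketch(spinlock_t *l)
 *	{
 *		uint32_t expected;
 *
 *		do {
 *			expected = 0;
 *		} while (!__atomic_compare_exchange_n(&l->lock, &expected, 1,
 *				true, __ATOMIC_ACQUIRE, __ATOMIC_RELAXED));
 *	}
 */
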
#endif /* USE_CAS */

/*
 * Release lock previously acquired by spin_lock.
 *
 * Unconditionally write 0, and conditionally generate an event.
 *
 * void spin_unlock(spinlock_t *lock);
 */
func spin_unlock
	stlr	wzr, [x0]	/* Store-release 0: the lock is now free */
	COND_SEV()		/* Explicit event for CAS waiters; no-op otherwise */
	ret
endfunc spin_unlock
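
/*
 * Illustrative usage, not part of this file: a caller serialising a critical
 * section (the lock variable and function are hypothetical):
 *
 *	static spinlock_t console_lock;
 *
 *	void emit_record(const char *msg)
 *	{
 *		spin_lock(&console_lock);
 *		... critical section: at most one CPU executes it at a time ...
 *		spin_unlock(&console_lock);
 *	}
 */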