/*
 * Copyright (c) 2013-2024, Arm Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <asm_macros.S>

	.globl	spin_lock
	.globl	spin_unlock
	.globl	bit_lock
	.globl	bit_unlock

#if USE_SPINLOCK_CAS
#if !ARM_ARCH_AT_LEAST(8, 1)
#error USE_SPINLOCK_CAS option requires at least an ARMv8.1 platform
#endif

/*
 * When compiled for ARMv8.1 or later, choose spin locks based on the Compare
 * and Swap instruction.
 */

/*
 * Acquire lock using Compare and Swap instruction.
 *
 * Compare for 0 with acquire semantics, and swap in 1. If the lock cannot be
 * acquired, use load-exclusive semantics to monitor the lock address and
 * enter WFE.
 *
 * void spin_lock(spinlock_t *lock);
 */
func spin_lock
	mov	w2, #1
1:	mov	w1, wzr
2:	casa	w1, w2, [x0]	/* Try to swap 0 -> 1 with acquire semantics */
	cbz	w1, 3f		/* CASA leaves the old value in w1: 0 means acquired */
	ldxr	w1, [x0]	/* Lock is held: monitor the lock address */
	cbz	w1, 2b		/* Released in the meantime, retry the CAS */
	wfe			/* Wait for an event from the releasing store */
	b	1b
3:
	ret
endfunc spin_lock
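
/*
 * Roughly equivalent C for the CAS-based acquire above, as a sketch only:
 * it uses GCC/Clang atomic builtins, assumes spinlock_t wraps a single
 * 32-bit word named "lock" (e.g. typedef struct { volatile uint32_t lock; }
 * spinlock_t), and omits the WFE-based wait.
 *
 *	void spin_lock(spinlock_t *lock)
 *	{
 *		uint32_t expected;
 *
 *		do {
 *			expected = 0U;
 *		} while (!__atomic_compare_exchange_n(&lock->lock, &expected,
 *						      1U, false,
 *						      __ATOMIC_ACQUIRE,
 *						      __ATOMIC_RELAXED));
 *	}
 */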

#else /* !USE_SPINLOCK_CAS */

/*
 * Acquire lock using load-/store-exclusive instruction pair.
 *
 * void spin_lock(spinlock_t *lock);
 */
func spin_lock
	mov	w2, #1
	sevl			/* Set the local event register so the first WFE falls through */
l1:	wfe			/* Wait for an event, e.g. the unlocking store */
l2:	ldaxr	w1, [x0]	/* Load-acquire exclusive of the lock value */
	cbnz	w1, l1		/* Still held: go back to waiting */
	stxr	w1, w2, [x0]	/* Try to store 1; w1 is the status, 0 on success */
	cbnz	w1, l2		/* Exclusive store failed: re-read and retry */
	ret
endfunc spin_lock
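
/*
 * A C sketch of the same acquire loop using compiler builtins. This is only
 * an approximation: the LDAXR/WFE wait is replaced by a plain relaxed poll,
 * and spinlock_t is again assumed to wrap a single 32-bit word named "lock".
 *
 *	void spin_lock(spinlock_t *lock)
 *	{
 *		while (__atomic_exchange_n(&lock->lock, 1U,
 *					   __ATOMIC_ACQUIRE) != 0U) {
 *			while (__atomic_load_n(&lock->lock,
 *					       __ATOMIC_RELAXED) != 0U)
 *				;
 *		}
 *	}
 */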

#endif /* USE_SPINLOCK_CAS */

/*
 * Release lock previously acquired by spin_lock.
 *
 * Use store-release to unconditionally clear the spinlock variable.
 * The store operation generates an event to all cores waiting in WFE
 * when the address is monitored by the global monitor.
 *
 * void spin_unlock(spinlock_t *lock);
 */
func spin_unlock
	stlr	wzr, [x0]	/* Store-release of 0 clears the lock */
	ret
endfunc spin_unlock
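
/*
 * For reference, a C sketch of the release (same spinlock_t layout
 * assumption as above): a single store-release of zero.
 *
 *	void spin_unlock(spinlock_t *lock)
 *	{
 *		__atomic_store_n(&lock->lock, 0U, __ATOMIC_RELEASE);
 *	}
 */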

/*
 * Atomic bit clear and set instructions require FEAT_LSE, which is
 * mandatory from Armv8.1.
 */
#if ARM_ARCH_AT_LEAST(8, 1)

/*
 * Acquire bitlock using atomic bit set on byte. If the original read value
 * has the bit set, use load-exclusive semantics to monitor the address and
 * enter WFE.
 *
 * void bit_lock(bitlock_t *lock, uint8_t mask);
 */
func bit_lock
1:	ldsetab	w1, w2, [x0]	/* Atomically set the mask bits; w2 gets the old byte */
	tst	w2, w1
	b.eq	2f		/* Bit was previously clear: lock acquired */
	ldxrb	w2, [x0]	/* Bit already set: monitor the byte */
	tst	w2, w1
	b.eq	1b		/* Cleared in the meantime, retry the atomic set */
	wfe			/* Wait for an event from the releasing store */
	b	1b
2:
	ret
endfunc bit_lock
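
/*
 * A C sketch of the bit lock acquire using an atomic fetch-or builtin,
 * assuming bitlock_t wraps a single byte named "lock"; the WFE-based wait
 * is omitted and replaced by a plain retry loop.
 *
 *	void bit_lock(bitlock_t *lock, uint8_t mask)
 *	{
 *		while ((__atomic_fetch_or(&lock->lock, mask,
 *					  __ATOMIC_ACQUIRE) & mask) != 0U)
 *			;
 *	}
 */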

/*
 * Use atomic bit clear with store-release to unconditionally clear the
 * bitlock variable. The store operation generates an event to all cores
 * waiting in WFE when the address is monitored by the global monitor.
 *
 * void bit_unlock(bitlock_t *lock, uint8_t mask);
 */
func bit_unlock
	stclrlb	w1, [x0]	/* Atomically clear the mask bits with release semantics */
	ret
endfunc bit_unlock
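
/*
 * And the matching C sketch of the release: atomically clear the mask bits
 * with release ordering (same bitlock_t layout assumption as above).
 *
 *	void bit_unlock(bitlock_t *lock, uint8_t mask)
 *	{
 *		__atomic_fetch_and(&lock->lock, (uint8_t)~mask,
 *				   __ATOMIC_RELEASE);
 *	}
 */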

#endif /* ARM_ARCH_AT_LEAST(8, 1) */