/*
 * Copyright (c) 2013-2019, ARM Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <asm_macros.S>

	.globl	spin_lock
	.globl	spin_unlock

#if USE_SPINLOCK_CAS
#if !ARM_ARCH_AT_LEAST(8, 1)
#error USE_SPINLOCK_CAS option requires at least an ARMv8.1 platform
#endif

/*
 * When compiled for ARMv8.1 or later, choose spin locks based on the
 * Compare and Swap instruction.
 */

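/*
 * USE_SPINLOCK_CAS is a platform build option. As an illustration only
 * (assuming the USE_SPINLOCK_CAS and ARM_ARCH_* build options documented
 * for this project; the platform name is a placeholder), a build that
 * selects this path might be invoked as:
 *
 *   make PLAT=<platform> ARM_ARCH_MAJOR=8 ARM_ARCH_MINOR=1 \
 *        USE_SPINLOCK_CAS=1
 */
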
/*
 * Acquire lock using the Compare and Swap instruction.
 *
 * Compare the lock variable against 0 with acquire semantics, and swap in 1.
 * If the lock could not be acquired, use load-exclusive semantics to monitor
 * the lock address and enter WFE.
 *
 * void spin_lock(spinlock_t *lock);
 */
func spin_lock
	mov	w2, #1
1:	mov	w1, wzr
2:	casa	w1, w2, [x0]	/* w1: expected value 0, w2: new value 1 */
	cbz	w1, 3f		/* Old value was 0: lock acquired */
	ldxr	w1, [x0]	/* Arm the exclusive monitor on the lock */
	cbz	w1, 2b		/* Lock was freed: retry the CAS */
	wfe			/* Wait for an event, then start over */
	b	1b
3:
	ret
endfunc spin_lock
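
/*
 * The sequence above corresponds roughly to this C sketch built on
 * GCC/Clang atomic builtins. It is illustrative only: plain C cannot
 * express the exclusive-monitor arming that pairs LDXR with WFE, and
 * the helper name and the ->lock member are assumptions (cf. the
 * spinlock_t definition in include/lib/spinlock.h).
 *
 *   void spin_lock_cas_sketch(spinlock_t *l)        // hypothetical
 *   {
 *       unsigned int expected;
 *
 *       for (;;) {
 *           expected = 0;
 *           // casa: compare-and-swap with acquire semantics, swap in 1
 *           if (__atomic_compare_exchange_n(&l->lock, &expected, 1,
 *                   false, __ATOMIC_ACQUIRE, __ATOMIC_RELAXED))
 *               return;
 *           // ldxr + wfe: doze until the lock word is rewritten
 *           while (__atomic_load_n(&l->lock, __ATOMIC_RELAXED) != 0)
 *               __asm__ volatile("wfe");
 *       }
 *   }
 */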

#else /* !USE_SPINLOCK_CAS */

/*
 * Acquire lock using a load-/store-exclusive instruction pair.
 *
 * void spin_lock(spinlock_t *lock);
 */
func spin_lock
	mov	w2, #1
	sevl			/* Set event register: first WFE falls through */
l1:	wfe			/* Wait until the lock might be free */
l2:	ldaxr	w1, [x0]	/* Load lock value, arm exclusive monitor */
	cbnz	w1, l1		/* Still held: go back to sleep */
	stxr	w1, w2, [x0]	/* Try to store 1; w1 is 0 on success */
	cbnz	w1, l2		/* Store failed: retry without sleeping */
	ret
endfunc spin_lock
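
/*
 * Roughly equivalent C sketch (illustrative; as above, the WFE/event
 * pairing has no direct C equivalent and the helper name is
 * hypothetical). A weak compare-and-exchange maps naturally onto the
 * LDAXR/STXR pair, since both may fail spuriously:
 *
 *   void spin_lock_excl_sketch(spinlock_t *l)       // hypothetical
 *   {
 *       unsigned int expected = 0;
 *
 *       // weak CAS: may fail spuriously, like a failing stxr
 *       while (!__atomic_compare_exchange_n(&l->lock, &expected, 1,
 *               true, __ATOMIC_ACQUIRE, __ATOMIC_RELAXED)) {
 *           if (expected != 0)
 *               __asm__ volatile("wfe");    // lock held: doze
 *           expected = 0;
 *       }
 *   }
 */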

#endif /* USE_SPINLOCK_CAS */

/*
 * Release lock previously acquired by spin_lock.
 *
 * Use a store-release to unconditionally clear the spinlock variable.
 * The store operation generates an event to all cores waiting in WFE
 * when the address is monitored by the global monitor.
 *
 * void spin_unlock(spinlock_t *lock);
 */
func spin_unlock
	stlr	wzr, [x0]	/* Store-release 0 to clear the lock */
	ret
endfunc spin_unlock
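
/*
 * Hedged usage sketch from the C side (illustrative; the include path
 * follows the current TF-A header layout, and my_lock/critical_update
 * are hypothetical names):
 *
 *   #include <lib/spinlock.h>
 *
 *   static spinlock_t my_lock;               // hypothetical instance
 *
 *   void critical_update(void)               // hypothetical function
 *   {
 *       spin_lock(&my_lock);
 *       // ... access state shared between cores ...
 *       spin_unlock(&my_lock);
 *   }
 */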