/*
 * Copyright (c) 2013-2019, ARM Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <asm_macros.S>

	.globl	spin_lock
	.globl	spin_unlock

#if USE_SPINLOCK_CAS
#if !ARM_ARCH_AT_LEAST(8, 1)
#error USE_SPINLOCK_CAS option requires at least an ARMv8.1 platform
#endif

/*
 * When compiled for ARMv8.1 or later, choose spin locks based on the Compare
 * and Swap instruction.
 */

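/*
 * Note: this CAS-based implementation is selected by the USE_SPINLOCK_CAS
 * build option. As an illustrative sketch only (PLAT and the architecture
 * version options are the usual TF-A build flags and are assumptions, not
 * defined by this file), a build enabling it could look like:
 *
 *	make PLAT=<platform> ARM_ARCH_MAJOR=8 ARM_ARCH_MINOR=1 USE_SPINLOCK_CAS=1
 */
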
/*
 * Acquire the lock using the Compare and Swap instruction.
 *
 * Compare the lock value against 0 with acquire semantics and swap in 1. If
 * the lock could not be acquired, use load-exclusive semantics to monitor the
 * lock address and enter WFE until it reads as free, then retry. (A rough
 * C-equivalent sketch follows the function below.)
 *
 * void spin_lock(spinlock_t *lock);
 */
func spin_lock
	mov	w2, #1			/* Value to store when the lock is taken */
1:	mov	w1, wzr			/* Expected value: lock is free (0) */
2:	casa	w1, w2, [x0]		/* Try to claim the lock with acquire semantics */
	cbz	w1, 3f			/* Old value 0 means the CAS succeeded */
	ldxr	w1, [x0]		/* Set up the exclusive monitor on the lock */
	cbz	w1, 2b			/* Lock became free: retry the CAS at once */
	wfe				/* Wait until the owner's store clears the monitor */
	b	1b
3:
	ret
endfunc spin_lock
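
/*
 * For reference, a rough C equivalent of the CAS-based acquire loop above,
 * written with GCC/Clang atomic builtins. This is an illustrative sketch
 * only, not part of this file's interface; the spinlock_t member name "lock"
 * is assumed from the TF-A spinlock header.
 *
 *	void spin_lock(spinlock_t *l)
 *	{
 *		for (;;) {
 *			uint32_t expected = 0U;
 *
 *			// casa: try to swap 0 -> 1 with acquire semantics
 *			if (__atomic_compare_exchange_n(&l->lock, &expected, 1U,
 *					0, __ATOMIC_ACQUIRE, __ATOMIC_RELAXED))
 *				return;
 *
 *			// ldxr/wfe: spin until the lock reads as free
 *			while (__atomic_load_n(&l->lock, __ATOMIC_RELAXED) != 0U)
 *				;
 *		}
 *	}
 */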

#else /* !USE_SPINLOCK_CAS */

/*
 * Acquire the lock using a load-/store-exclusive instruction pair.
 *
 * void spin_lock(spinlock_t *lock);
 */
func spin_lock
	mov	w2, #1			/* Value to store when the lock is taken */
	sevl				/* Set the local event so the first WFE falls through */
l1:	wfe				/* Wait for the lock to be released */
l2:	ldaxr	w1, [x0]		/* Read the lock with acquire semantics */
	cbnz	w1, l1			/* Lock is held: go back to sleep */
	stxr	w1, w2, [x0]		/* Attempt to claim the lock */
	cbnz	w1, l2			/* Store-exclusive failed: try again */
	ret
endfunc spin_lock

#endif /* USE_SPINLOCK_CAS */

/*
 * Release a lock previously acquired by spin_lock.
 *
 * Use a store-release to unconditionally clear the spinlock variable. The
 * store clears the global exclusive monitor set up on the lock address,
 * which generates an event for any cores waiting on it in WFE.
 *
 * void spin_unlock(spinlock_t *lock);
 */
func spin_unlock
	stlr	wzr, [x0]
	ret
endfunc spin_unlock
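
/*
 * Usage sketch (illustrative only): from C, callers include the TF-A spinlock
 * header and wrap their critical section with the two functions above. The
 * header path and the lock declaration below follow the usual TF-A
 * conventions and are assumptions, not defined by this file.
 *
 *	#include <lib/spinlock.h>
 *
 *	static spinlock_t console_lock;
 *
 *	void protected_update(void)
 *	{
 *		spin_lock(&console_lock);
 *		// critical section: keep it short, the lock is not re-entrant
 *		spin_unlock(&console_lock);
 *	}
 */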