/*
 * Copyright (c) 2013-2024, Arm Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <asm_macros.S>

	.globl	spin_lock
	.globl	spin_unlock
	.globl	bit_lock
	.globl	bit_unlock

#if USE_SPINLOCK_CAS
#if !ARM_ARCH_AT_LEAST(8, 1)
#error USE_SPINLOCK_CAS option requires at least an Armv8.1 platform
#endif

/*
 * When compiled for Armv8.1 or later, implement spin locks using the Compare
 * and Swap instruction.
 */

/*
 * Acquire lock using Compare and Swap instruction.
 *
 * Compare the lock value against 0 with acquire semantics and swap in 1. If
 * the lock is already held, use load-exclusive semantics to monitor the
 * address and enter WFE until it is released.
 *
 * void spin_lock(spinlock_t *lock);
 */
func spin_lock
	mov	w2, #1
1:	mov	w1, wzr
2:	casa	w1, w2, [x0]	/* If *lock == 0, atomically store 1 */
	cbz	w1, 3f		/* Old value 0: lock acquired */
	ldxr	w1, [x0]	/* Arm the exclusive monitor on the lock */
	cbz	w1, 2b		/* Released meanwhile: retry the CAS */
	wfe			/* Sleep until the monitored address is written */
	b	1b
3:
	ret
endfunc spin_lock
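
/*
 * C-like sketch of the acquire loop above (illustrative pseudocode only,
 * not part of the build; casa(), ldxr() and wfe() stand for the
 * respective instructions):
 *
 *	for (;;) {
 *		if (casa(lock, 0, 1) == 0)
 *			return;		- lock was free and is now ours
 *		if (ldxr(lock) == 0)	- also arms the exclusive monitor
 *			continue;	- retry the CAS immediately
 *		wfe();			- sleep until the lock is written
 *	}
 */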

#else /* !USE_SPINLOCK_CAS */

/*
 * Acquire lock using load-/store-exclusive instruction pair.
 *
 * void spin_lock(spinlock_t *lock);
 */
func spin_lock
	mov	w2, #1
	sevl			/* Set local event so the first WFE falls through */
l1:	wfe
l2:	ldaxr	w1, [x0]	/* Load with acquire, arm the exclusive monitor */
	cbnz	w1, l1		/* Lock held: wait for an event and retry */
	stxr	w1, w2, [x0]	/* Attempt the claim; w1 != 0 on failure */
	cbnz	w1, l2
	ret
endfunc spin_lock
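
/*
 * Equivalent logic in C-like pseudocode (illustrative only; sevl(),
 * wfe(), ldaxr() and stxr() stand for the respective instructions):
 *
 *	sevl();				- pre-arm so the first wfe() returns
 *	for (;;) {
 *		wfe();
 *		while (ldaxr(lock) == 0) {
 *			if (stxr(lock, 1) == 0)
 *				return;	- store succeeded, lock is ours
 *		}
 *	}
 */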

#endif /* USE_SPINLOCK_CAS */

/*
 * Release a lock previously acquired by spin_lock.
 *
 * Use a store-release to unconditionally clear the spinlock variable. The
 * store generates an event that wakes any core waiting in WFE while the
 * address is monitored by the global monitor.
 *
 * void spin_unlock(spinlock_t *lock);
 */
func spin_unlock
	stlr	wzr, [x0]	/* Store-release 0 to clear the lock */
	ret
endfunc spin_unlock
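
/*
 * Typical usage from C (a minimal sketch; "state_lock" is a hypothetical
 * variable shown for illustration, not defined in this file):
 *
 *	static spinlock_t state_lock;
 *
 *	spin_lock(&state_lock);
 *	... critical section ...
 *	spin_unlock(&state_lock);
 */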

/*
 * Atomic bit clear and set instructions require FEAT_LSE, which is
 * mandatory from Armv8.1 onwards.
 */
#if ARM_ARCH_AT_LEAST(8, 1)

/*
 * Acquire the bitlock using an atomic bit set on a byte. If the bit was
 * already set in the value read back, use load-exclusive semantics to
 * monitor the address and enter WFE until the bit is cleared.
 *
 * void bit_lock(bitlock_t *lock, uint8_t mask);
 */
func bit_lock
1:	ldsetab	w1, w2, [x0]	/* Atomically set mask bits; w2 = old value */
	tst	w2, w1
	b.eq	2f		/* Bit was clear before: lock acquired */
	ldxrb	w2, [x0]	/* Arm the exclusive monitor on the lock byte */
	tst	w2, w1
	b.eq	1b		/* Bit cleared meanwhile: retry the atomic set */
	wfe			/* Sleep until the monitored byte is written */
	b	1b
2:
	ret
endfunc bit_lock
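
/*
 * C-like sketch of the loop above (illustrative pseudocode only;
 * ldsetab(), ldxrb() and wfe() stand for the respective instructions):
 *
 *	for (;;) {
 *		if ((ldsetab(lock, mask) & mask) == 0)
 *			return;		- bit was clear, lock acquired
 *		if ((ldxrb(lock) & mask) == 0)
 *			continue;	- cleared meanwhile, retry the set
 *		wfe();			- sleep until the byte is written
 *	}
 */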

/*
 * Use an atomic bit clear with store-release semantics to unconditionally
 * clear the bitlock variable. The store generates an event that wakes any
 * core waiting in WFE while the address is monitored by the global monitor.
 *
 * void bit_unlock(bitlock_t *lock, uint8_t mask);
 */
func bit_unlock
	stclrlb	w1, [x0]	/* Atomically clear mask bits with release */
	ret
endfunc bit_unlock
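
/*
 * Typical usage from C (a minimal sketch; "flags_lock" is a hypothetical
 * variable shown for illustration, not defined in this file):
 *
 *	static bitlock_t flags_lock;
 *
 *	bit_lock(&flags_lock, 1U << 0);
 *	... critical section guarded by bit 0 ...
 *	bit_unlock(&flags_lock, 1U << 0);
 */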

#endif /* ARM_ARCH_AT_LEAST(8, 1) */