/*
 * Copyright (c) 2016, Arm Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <asm_macros.S>

	.globl	spin_lock
	.globl	spin_unlock
| 11 | |
#if ARM_ARCH_AT_LEAST(8, 0)
/*
 * According to the ARMv8-A Architecture Reference Manual, "when the global
 * monitor for a PE changes from Exclusive Access state to Open Access state,
 * an event is generated.". This applies to both AArch32 and AArch64 modes of
 * ARMv8-A. As a result, no explicit SEV with unlock is required.
 */
#define COND_SEV()
#else
/*
 * Pre-v8 architectures give no such guarantee: emit an explicit SEV on
 * unlock to wake any PE parked in the WFE inside spin_lock.
 */
#define COND_SEV()	sev
#endif
Soby Mathew | 748be1d | 2016-05-05 14:10:46 +0100 | [diff] [blame] | 23 | |
| 24 | func spin_lock |
| 25 | mov r2, #1 |
| 26 | 1: |
| 27 | ldrex r1, [r0] |
| 28 | cmp r1, #0 |
| 29 | wfene |
| 30 | strexeq r1, r2, [r0] |
| 31 | cmpeq r1, #0 |
| 32 | bne 1b |
| 33 | dmb |
| 34 | bx lr |
| 35 | endfunc spin_lock |
| 36 | |
| 37 | |
| 38 | func spin_unlock |
| 39 | mov r1, #0 |
| 40 | stl r1, [r0] |
Etienne Carriere | 70b1c2f | 2017-11-05 22:55:47 +0100 | [diff] [blame] | 41 | COND_SEV() |
Soby Mathew | 748be1d | 2016-05-05 14:10:46 +0100 | [diff] [blame] | 42 | bx lr |
| 43 | endfunc spin_unlock |