/*
 * Copyright (C) 2020 Intel Corporation. All rights reserved
 *
 * SPDX-License-Identifier: GPL-2.0
 */

#include <asm-offsets.h>
#include <config.h>
#include <linux/linkage.h>
#include <asm/macro.h>
ENTRY(lowlevel_init)
	mov	x29, lr			/* Save LR */

#if defined(CONFIG_GICV2) || defined(CONFIG_GICV3)
#if defined(CONFIG_SPL_BUILD) && defined(CONFIG_SPL_ATF)
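	/*
	 * With SPL + ATF, the next-stage jump address is published in
	 * the spin table at CPU_RELEASE_ADDR. Any CPU that reads a
	 * non-zero entry branches to it; slaves keep polling until it
	 * is set, while the master falls through to set up the GIC.
	 */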
wait_for_atf:
	ldr	x4, =CPU_RELEASE_ADDR
	ldr	x5, [x4]
	cbz	x5, slave_wait_atf
	br	x5
slave_wait_atf:
	branch_if_slave x0, wait_for_atf
#else
	branch_if_slave x0, 1f
#endif
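	/* Master CPU: one-time secure initialization of the GIC distributor. */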
	ldr	x0, =GICD_BASE
	bl	gic_init_secure
1:
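	/*
	 * Per-CPU GIC setup: GICv3 takes the redistributor base,
	 * GICv2 takes the distributor and CPU interface bases.
	 */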
#if defined(CONFIG_GICV3)
	ldr	x0, =GICR_BASE
	bl	gic_init_secure_percpu
#elif defined(CONFIG_GICV2)
	ldr	x0, =GICD_BASE
	ldr	x1, =GICC_BASE
	bl	gic_init_secure_percpu
#endif
#endif

#ifdef CONFIG_ARMV8_MULTIENTRY
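	/*
	 * The master skips ahead to the exit path at label 2; only
	 * slaves execute the wait-and-switch sequence below.
	 */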
	branch_if_master x0, 2f

	/*
	 * Slaves should wait for the master to finish clearing the
	 * spin table. This synchronization prevents slaves from
	 * observing a stale spin-table value and jumping to the
	 * wrong place.
	 */
#if defined(CONFIG_GICV2) || defined(CONFIG_GICV3)
#ifdef CONFIG_GICV2
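	/*
	 * On GICv2, gic_wait_for_interrupt expects the CPU interface
	 * base in x0; the GICv3 path uses system registers instead.
	 */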
	ldr	x0, =GICC_BASE
#endif
	bl	gic_wait_for_interrupt
#endif

	/*
	 * All slaves will enter EL2 and optionally EL1.
	 */
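	/*
	 * armv8_switch_to_el2 takes the post-switch entry point in x4
	 * and the target execution state in x5 (AArch64 here).
	 */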
	adr	x4, lowlevel_in_el2
	ldr	x5, =ES_TO_AARCH64
	bl	armv8_switch_to_el2

lowlevel_in_el2:
#ifdef CONFIG_ARMV8_SWITCH_TO_EL1
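	/* Optionally drop one level further, from EL2 to EL1. */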
	adr	x4, lowlevel_in_el1
	ldr	x5, =ES_TO_AARCH64
	bl	armv8_switch_to_el1

lowlevel_in_el1:
#endif

#endif /* CONFIG_ARMV8_MULTIENTRY */

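	/*
	 * The master branches here directly; slaves fall through
	 * after the EL switch. Both return to the caller.
	 */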
2:
	mov	lr, x29			/* Restore LR */
	ret
ENDPROC(lowlevel_init)