/*
 * Copyright (c) 2017-2018, ARM Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <cdefs.h>
#include <mmio.h>
#include <smmu_v3.h>
#include <stdbool.h>

static inline uint32_t __init smmuv3_read_s_idr1(uintptr_t base)
{
	return mmio_read_32(base + SMMU_S_IDR1);
}

static inline uint32_t __init smmuv3_read_s_init(uintptr_t base)
{
	return mmio_read_32(base + SMMU_S_INIT);
}

static inline void __init smmuv3_write_s_init(uintptr_t base, uint32_t value)
{
	mmio_write_32(base + SMMU_S_INIT, value);
}

/* Test for pending invalidate */
static inline bool smmuv3_inval_pending(uintptr_t base)
{
	return (smmuv3_read_s_init(base) & SMMU_S_INIT_INV_ALL_MASK) != 0U;
}

/*
 * Initialize the SMMU by invalidating all secure caches and TLBs.
 *
 * Returns 0 on success, and -1 on failure.
 */
int __init smmuv3_init(uintptr_t smmu_base)
{
	uint32_t idr1_reg;

	/*
	 * Invalidation of secure caches and TLBs is required only if the SMMU
	 * supports secure state. If not, the behaviour of accesses to the
	 * SMMU_S_INIT register is implementation defined.
	 */
	idr1_reg = smmuv3_read_s_idr1(smmu_base);
	if (((idr1_reg >> SMMU_S_IDR1_SECURE_IMPL_SHIFT) &
			SMMU_S_IDR1_SECURE_IMPL_MASK) == 0U) {
		return -1;
	}

	/* Initiate invalidation, and wait for it to finish */
	smmuv3_write_s_init(smmu_base, SMMU_S_INIT_INV_ALL_MASK);
	while (smmuv3_inval_pending(smmu_base))
		;

	return 0;
}