Merge pull request #1460 from robertovargas-arm/clang
Make TF compatible with Clang assembler and linker
diff --git a/Makefile b/Makefile
index 180c558..533cb8a 100644
--- a/Makefile
+++ b/Makefile
@@ -85,7 +85,13 @@
ifneq (${DEBUG}, 0)
BUILD_TYPE := debug
TF_CFLAGS += -g
- ASFLAGS += -g -Wa,--gdwarf-2
+
+ ifneq ($(findstring clang,$(notdir $(CC))),)
+ ASFLAGS += -g
+ else
+ ASFLAGS += -g -Wa,--gdwarf-2
+ endif
+
# Use LOG_LEVEL_INFO by default for debug builds
LOG_LEVEL := 40
else
@@ -119,7 +125,7 @@
CPP := ${CROSS_COMPILE}cpp
AS := ${CROSS_COMPILE}gcc
AR := ${CROSS_COMPILE}ar
-LD := ${CROSS_COMPILE}ld
+LINKER := ${CROSS_COMPILE}ld
OC := ${CROSS_COMPILE}objcopy
OD := ${CROSS_COMPILE}objdump
NM := ${CROSS_COMPILE}nm
@@ -128,8 +134,8 @@
# Use ${LD}.bfd instead if it exists (as absolute path or together with $PATH).
ifneq ($(strip $(wildcard ${LD}.bfd) \
- $(foreach dir,$(subst :, ,${PATH}),$(wildcard ${dir}/${LD}.bfd))),)
-LD := ${LD}.bfd
+ $(foreach dir,$(subst :, ,${PATH}),$(wildcard ${dir}/${LINKER}.bfd))),)
+LINKER := ${LINKER}.bfd
endif
ifeq (${ARM_ARCH_MAJOR},7)
@@ -143,12 +149,21 @@
ifeq ($(notdir $(CC)),armclang)
TF_CFLAGS_aarch32 = -target arm-arm-none-eabi $(march32-directive)
TF_CFLAGS_aarch64 = -target aarch64-arm-none-eabi -march=armv8-a
+LD = $(LINKER)
+AS = $(CC) -c -x assembler-with-cpp $(TF_CFLAGS_$(ARCH))
+CPP = $(CC) -E $(TF_CFLAGS_$(ARCH))
+PP = $(CC) -E $(TF_CFLAGS_$(ARCH))
else ifneq ($(findstring clang,$(notdir $(CC))),)
TF_CFLAGS_aarch32 = $(target32-directive)
TF_CFLAGS_aarch64 = -target aarch64-elf
+LD = $(LINKER)
+AS = $(CC) -c -x assembler-with-cpp $(TF_CFLAGS_$(ARCH))
+CPP = $(CC) -E
+PP = $(CC) -E
else
TF_CFLAGS_aarch32 = $(march32-directive)
TF_CFLAGS_aarch64 = -march=armv8-a
+LD = $(LINKER)
endif
TF_CFLAGS_aarch32 += -mno-unaligned-access
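Note on the selection logic above (the compiler path is a hypothetical example, not taken from the patch): with CC=/usr/bin/clang-6.0, $(notdir $(CC)) expands to clang-6.0 and $(findstring clang,clang-6.0) yields the non-empty string "clang", so the Clang branch is taken and the tools resolve to roughly:

    AS  = /usr/bin/clang-6.0 -c -x assembler-with-cpp $(TF_CFLAGS_$(ARCH))
    CPP = /usr/bin/clang-6.0 -E
    LD  = $(LINKER)    # i.e. ${CROSS_COMPILE}ld (or ${CROSS_COMPILE}ld.bfd if present), unless LD is overridden on the make command line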
diff --git a/bl1/aarch64/bl1_exceptions.S b/bl1/aarch64/bl1_exceptions.S
index 7ac028a..cf8a6a7 100644
--- a/bl1/aarch64/bl1_exceptions.S
+++ b/bl1/aarch64/bl1_exceptions.S
@@ -26,25 +26,25 @@
mov x0, #SYNC_EXCEPTION_SP_EL0
bl plat_report_exception
no_ret plat_panic_handler
- check_vector_size SynchronousExceptionSP0
+end_vector_entry SynchronousExceptionSP0
vector_entry IrqSP0
mov x0, #IRQ_SP_EL0
bl plat_report_exception
no_ret plat_panic_handler
- check_vector_size IrqSP0
+end_vector_entry IrqSP0
vector_entry FiqSP0
mov x0, #FIQ_SP_EL0
bl plat_report_exception
no_ret plat_panic_handler
- check_vector_size FiqSP0
+end_vector_entry FiqSP0
vector_entry SErrorSP0
mov x0, #SERROR_SP_EL0
bl plat_report_exception
no_ret plat_panic_handler
- check_vector_size SErrorSP0
+end_vector_entry SErrorSP0
/* -----------------------------------------------------
* Current EL with SPx: 0x200 - 0x400
@@ -54,25 +54,25 @@
mov x0, #SYNC_EXCEPTION_SP_ELX
bl plat_report_exception
no_ret plat_panic_handler
- check_vector_size SynchronousExceptionSPx
+end_vector_entry SynchronousExceptionSPx
vector_entry IrqSPx
mov x0, #IRQ_SP_ELX
bl plat_report_exception
no_ret plat_panic_handler
- check_vector_size IrqSPx
+end_vector_entry IrqSPx
vector_entry FiqSPx
mov x0, #FIQ_SP_ELX
bl plat_report_exception
no_ret plat_panic_handler
- check_vector_size FiqSPx
+end_vector_entry FiqSPx
vector_entry SErrorSPx
mov x0, #SERROR_SP_ELX
bl plat_report_exception
no_ret plat_panic_handler
- check_vector_size SErrorSPx
+end_vector_entry SErrorSPx
/* -----------------------------------------------------
* Lower EL using AArch64 : 0x400 - 0x600
@@ -91,25 +91,25 @@
b.ne unexpected_sync_exception
b smc_handler64
- check_vector_size SynchronousExceptionA64
+end_vector_entry SynchronousExceptionA64
vector_entry IrqA64
mov x0, #IRQ_AARCH64
bl plat_report_exception
no_ret plat_panic_handler
- check_vector_size IrqA64
+end_vector_entry IrqA64
vector_entry FiqA64
mov x0, #FIQ_AARCH64
bl plat_report_exception
no_ret plat_panic_handler
- check_vector_size FiqA64
+end_vector_entry FiqA64
vector_entry SErrorA64
mov x0, #SERROR_AARCH64
bl plat_report_exception
no_ret plat_panic_handler
- check_vector_size SErrorA64
+end_vector_entry SErrorA64
/* -----------------------------------------------------
* Lower EL using AArch32 : 0x600 - 0x800
@@ -119,25 +119,25 @@
mov x0, #SYNC_EXCEPTION_AARCH32
bl plat_report_exception
no_ret plat_panic_handler
- check_vector_size SynchronousExceptionA32
+end_vector_entry SynchronousExceptionA32
vector_entry IrqA32
mov x0, #IRQ_AARCH32
bl plat_report_exception
no_ret plat_panic_handler
- check_vector_size IrqA32
+end_vector_entry IrqA32
vector_entry FiqA32
mov x0, #FIQ_AARCH32
bl plat_report_exception
no_ret plat_panic_handler
- check_vector_size FiqA32
+end_vector_entry FiqA32
vector_entry SErrorA32
mov x0, #SERROR_AARCH32
bl plat_report_exception
no_ret plat_panic_handler
- check_vector_size SErrorA32
+end_vector_entry SErrorA32
func smc_handler64
diff --git a/bl1/bl1.ld.S b/bl1/bl1.ld.S
index 26c0ae4..fabe3ef 100644
--- a/bl1/bl1.ld.S
+++ b/bl1/bl1.ld.S
@@ -28,10 +28,19 @@
*bl1_entrypoint.o(.text*)
*(.text*)
*(.vectors)
- . = NEXT(PAGE_SIZE);
+ . = ALIGN(PAGE_SIZE);
__TEXT_END__ = .;
} >ROM
+ /* .ARM.extab and .ARM.exidx are only added because Clang needs them */
+ .ARM.extab . : {
+ *(.ARM.extab* .gnu.linkonce.armextab.*)
+ } >ROM
+
+ .ARM.exidx . : {
+ *(.ARM.exidx* .gnu.linkonce.armexidx.*)
+ } >ROM
+
.rodata . : {
__RODATA_START__ = .;
*(.rodata*)
@@ -152,7 +161,7 @@
* as device memory. No other unexpected data must creep in.
* Ensure the rest of the current memory page is unused.
*/
- . = NEXT(PAGE_SIZE);
+ . = ALIGN(PAGE_SIZE);
__COHERENT_RAM_END__ = .;
} >RAM
#endif
diff --git a/bl2/aarch64/bl2_el3_exceptions.S b/bl2/aarch64/bl2_el3_exceptions.S
index 987f6e3..07d1040 100644
--- a/bl2/aarch64/bl2_el3_exceptions.S
+++ b/bl2/aarch64/bl2_el3_exceptions.S
@@ -26,25 +26,25 @@
mov x0, #SYNC_EXCEPTION_SP_EL0
bl plat_report_exception
no_ret plat_panic_handler
- check_vector_size SynchronousExceptionSP0
+end_vector_entry SynchronousExceptionSP0
vector_entry IrqSP0
mov x0, #IRQ_SP_EL0
bl plat_report_exception
no_ret plat_panic_handler
- check_vector_size IrqSP0
+end_vector_entry IrqSP0
vector_entry FiqSP0
mov x0, #FIQ_SP_EL0
bl plat_report_exception
no_ret plat_panic_handler
- check_vector_size FiqSP0
+end_vector_entry FiqSP0
vector_entry SErrorSP0
mov x0, #SERROR_SP_EL0
bl plat_report_exception
no_ret plat_panic_handler
- check_vector_size SErrorSP0
+end_vector_entry SErrorSP0
/* -----------------------------------------------------
* Current EL with SPx: 0x200 - 0x400
@@ -54,25 +54,25 @@
mov x0, #SYNC_EXCEPTION_SP_ELX
bl plat_report_exception
no_ret plat_panic_handler
- check_vector_size SynchronousExceptionSPx
+end_vector_entry SynchronousExceptionSPx
vector_entry IrqSPx
mov x0, #IRQ_SP_ELX
bl plat_report_exception
no_ret plat_panic_handler
- check_vector_size IrqSPx
+end_vector_entry IrqSPx
vector_entry FiqSPx
mov x0, #FIQ_SP_ELX
bl plat_report_exception
no_ret plat_panic_handler
- check_vector_size FiqSPx
+end_vector_entry FiqSPx
vector_entry SErrorSPx
mov x0, #SERROR_SP_ELX
bl plat_report_exception
no_ret plat_panic_handler
- check_vector_size SErrorSPx
+end_vector_entry SErrorSPx
/* -----------------------------------------------------
* Lower EL using AArch64 : 0x400 - 0x600
@@ -82,25 +82,25 @@
mov x0, #SYNC_EXCEPTION_AARCH64
bl plat_report_exception
no_ret plat_panic_handler
- check_vector_size SynchronousExceptionA64
+end_vector_entry SynchronousExceptionA64
vector_entry IrqA64
mov x0, #IRQ_AARCH64
bl plat_report_exception
no_ret plat_panic_handler
- check_vector_size IrqA64
+end_vector_entry IrqA64
vector_entry FiqA64
mov x0, #FIQ_AARCH64
bl plat_report_exception
no_ret plat_panic_handler
- check_vector_size FiqA64
+end_vector_entry FiqA64
vector_entry SErrorA64
mov x0, #SERROR_AARCH64
bl plat_report_exception
no_ret plat_panic_handler
- check_vector_size SErrorA64
+end_vector_entry SErrorA64
/* -----------------------------------------------------
* Lower EL using AArch32 : 0x600 - 0x800
@@ -110,22 +110,22 @@
mov x0, #SYNC_EXCEPTION_AARCH32
bl plat_report_exception
no_ret plat_panic_handler
- check_vector_size SynchronousExceptionA32
+end_vector_entry SynchronousExceptionA32
vector_entry IrqA32
mov x0, #IRQ_AARCH32
bl plat_report_exception
no_ret plat_panic_handler
- check_vector_size IrqA32
+end_vector_entry IrqA32
vector_entry FiqA32
mov x0, #FIQ_AARCH32
bl plat_report_exception
no_ret plat_panic_handler
- check_vector_size FiqA32
+end_vector_entry FiqA32
vector_entry SErrorA32
mov x0, #SERROR_AARCH32
bl plat_report_exception
no_ret plat_panic_handler
- check_vector_size SErrorA32
+end_vector_entry SErrorA32
diff --git a/bl2/bl2.ld.S b/bl2/bl2.ld.S
index 69c22eb..6d26cdb 100644
--- a/bl2/bl2.ld.S
+++ b/bl2/bl2.ld.S
@@ -28,10 +28,19 @@
*bl2_entrypoint.o(.text*)
*(.text*)
*(.vectors)
- . = NEXT(PAGE_SIZE);
+ . = ALIGN(PAGE_SIZE);
__TEXT_END__ = .;
} >RAM
+ /* .ARM.extab and .ARM.exidx are only added because Clang needs them */
+ .ARM.extab . : {
+ *(.ARM.extab* .gnu.linkonce.armextab.*)
+ } >RAM
+
+ .ARM.exidx . : {
+ *(.ARM.exidx* .gnu.linkonce.armexidx.*)
+ } >RAM
+
.rodata . : {
__RODATA_START__ = .;
*(.rodata*)
@@ -42,7 +51,7 @@
KEEP(*(.img_parser_lib_descs))
__PARSER_LIB_DESCS_END__ = .;
- . = NEXT(PAGE_SIZE);
+ . = ALIGN(PAGE_SIZE);
__RODATA_END__ = .;
} >RAM
#else
@@ -65,7 +74,7 @@
* read-only, executable. No RW data from the next section must
* creep in. Ensure the rest of the current memory page is unused.
*/
- . = NEXT(PAGE_SIZE);
+ . = ALIGN(PAGE_SIZE);
__RO_END__ = .;
} >RAM
#endif
@@ -131,7 +140,7 @@
* as device memory. No other unexpected data must creep in.
* Ensure the rest of the current memory page is unused.
*/
- . = NEXT(PAGE_SIZE);
+ . = ALIGN(PAGE_SIZE);
__COHERENT_RAM_END__ = .;
} >RAM
#endif
diff --git a/bl2/bl2_el3.ld.S b/bl2/bl2_el3.ld.S
index 0f91edc..82ab427 100644
--- a/bl2/bl2_el3.ld.S
+++ b/bl2/bl2_el3.ld.S
@@ -42,7 +42,7 @@
__TEXT_RESIDENT_END__ = .;
*(.text*)
*(.vectors)
- . = NEXT(PAGE_SIZE);
+ . = ALIGN(PAGE_SIZE);
__TEXT_END__ = .;
#if BL2_IN_XIP_MEM
} >ROM
@@ -69,7 +69,7 @@
KEEP(*(cpu_ops))
__CPU_OPS_END__ = .;
- . = NEXT(PAGE_SIZE);
+ . = ALIGN(PAGE_SIZE);
__RODATA_END__ = .;
#if BL2_IN_XIP_MEM
} >ROM
@@ -111,7 +111,7 @@
* read-only, executable. No RW data from the next section must
* creep in. Ensure the rest of the current memory page is unused.
*/
- . = NEXT(PAGE_SIZE);
+ . = ALIGN(PAGE_SIZE);
__RO_END__ = .;
#if BL2_IN_XIP_MEM
@@ -195,7 +195,7 @@
* as device memory. No other unexpected data must creep in.
* Ensure the rest of the current memory page is unused.
*/
- . = NEXT(PAGE_SIZE);
+ . = ALIGN(PAGE_SIZE);
__COHERENT_RAM_END__ = .;
} >RAM
#endif
diff --git a/bl2u/bl2u.ld.S b/bl2u/bl2u.ld.S
index 7b97758..3db5f89 100644
--- a/bl2u/bl2u.ld.S
+++ b/bl2u/bl2u.ld.S
@@ -28,14 +28,23 @@
*bl2u_entrypoint.o(.text*)
*(.text*)
*(.vectors)
- . = NEXT(PAGE_SIZE);
+ . = ALIGN(PAGE_SIZE);
__TEXT_END__ = .;
} >RAM
+ /* .ARM.extab and .ARM.exidx are only added because Clang needs them */
+ .ARM.extab . : {
+ *(.ARM.extab* .gnu.linkonce.armextab.*)
+ } >RAM
+
+ .ARM.exidx . : {
+ *(.ARM.exidx* .gnu.linkonce.armexidx.*)
+ } >RAM
+
.rodata . : {
__RODATA_START__ = .;
*(.rodata*)
- . = NEXT(PAGE_SIZE);
+ . = ALIGN(PAGE_SIZE);
__RODATA_END__ = .;
} >RAM
#else
@@ -52,7 +61,7 @@
* read-only, executable. No RW data from the next section must
* creep in. Ensure the rest of the current memory page is unused.
*/
- . = NEXT(PAGE_SIZE);
+ . = ALIGN(PAGE_SIZE);
__RO_END__ = .;
} >RAM
#endif
@@ -118,7 +127,7 @@
* as device memory. No other unexpected data must creep in.
* Ensure the rest of the current memory page is unused.
*/
- . = NEXT(PAGE_SIZE);
+ . = ALIGN(PAGE_SIZE);
__COHERENT_RAM_END__ = .;
} >RAM
#endif
diff --git a/bl31/aarch64/runtime_exceptions.S b/bl31/aarch64/runtime_exceptions.S
index 346cd3b..12f9f10 100644
--- a/bl31/aarch64/runtime_exceptions.S
+++ b/bl31/aarch64/runtime_exceptions.S
@@ -233,7 +233,7 @@
vector_entry sync_exception_sp_el0
/* We don't expect any synchronous exceptions from EL3 */
b report_unhandled_exception
- check_vector_size sync_exception_sp_el0
+end_vector_entry sync_exception_sp_el0
vector_entry irq_sp_el0
/*
@@ -241,17 +241,17 @@
* error. Loop infinitely.
*/
b report_unhandled_interrupt
- check_vector_size irq_sp_el0
+end_vector_entry irq_sp_el0
vector_entry fiq_sp_el0
b report_unhandled_interrupt
- check_vector_size fiq_sp_el0
+end_vector_entry fiq_sp_el0
vector_entry serror_sp_el0
b report_unhandled_exception
- check_vector_size serror_sp_el0
+end_vector_entry serror_sp_el0
/* ---------------------------------------------------------------------
* Current EL with SP_ELx: 0x200 - 0x400
@@ -265,19 +265,19 @@
* corrupted.
*/
b report_unhandled_exception
- check_vector_size sync_exception_sp_elx
+end_vector_entry sync_exception_sp_elx
vector_entry irq_sp_elx
b report_unhandled_interrupt
- check_vector_size irq_sp_elx
+end_vector_entry irq_sp_elx
vector_entry fiq_sp_elx
b report_unhandled_interrupt
- check_vector_size fiq_sp_elx
+end_vector_entry fiq_sp_elx
vector_entry serror_sp_elx
b report_unhandled_exception
- check_vector_size serror_sp_elx
+end_vector_entry serror_sp_elx
/* ---------------------------------------------------------------------
* Lower EL using AArch64 : 0x400 - 0x600
@@ -292,17 +292,17 @@
*/
check_and_unmask_ea
handle_sync_exception
- check_vector_size sync_exception_aarch64
+end_vector_entry sync_exception_aarch64
vector_entry irq_aarch64
check_and_unmask_ea
handle_interrupt_exception irq_aarch64
- check_vector_size irq_aarch64
+end_vector_entry irq_aarch64
vector_entry fiq_aarch64
check_and_unmask_ea
handle_interrupt_exception fiq_aarch64
- check_vector_size fiq_aarch64
+end_vector_entry fiq_aarch64
vector_entry serror_aarch64
msr daifclr, #DAIF_ABT_BIT
@@ -313,7 +313,7 @@
*/
str x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]
handle_ea #ERROR_EA_ASYNC
- check_vector_size serror_aarch64
+end_vector_entry serror_aarch64
/* ---------------------------------------------------------------------
* Lower EL using AArch32 : 0x600 - 0x800
@@ -328,17 +328,17 @@
*/
check_and_unmask_ea
handle_sync_exception
- check_vector_size sync_exception_aarch32
+end_vector_entry sync_exception_aarch32
vector_entry irq_aarch32
check_and_unmask_ea
handle_interrupt_exception irq_aarch32
- check_vector_size irq_aarch32
+end_vector_entry irq_aarch32
vector_entry fiq_aarch32
check_and_unmask_ea
handle_interrupt_exception fiq_aarch32
- check_vector_size fiq_aarch32
+end_vector_entry fiq_aarch32
vector_entry serror_aarch32
msr daifclr, #DAIF_ABT_BIT
@@ -349,7 +349,7 @@
*/
str x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]
handle_ea #ERROR_EA_ASYNC
- check_vector_size serror_aarch32
+end_vector_entry serror_aarch32
/* ---------------------------------------------------------------------
diff --git a/bl31/bl31.ld.S b/bl31/bl31.ld.S
index 59df9b8..66cb3f3 100644
--- a/bl31/bl31.ld.S
+++ b/bl31/bl31.ld.S
@@ -32,7 +32,7 @@
*bl31_entrypoint.o(.text*)
*(.text*)
*(.vectors)
- . = NEXT(PAGE_SIZE);
+ . = ALIGN(PAGE_SIZE);
__TEXT_END__ = .;
} >RAM
@@ -67,7 +67,7 @@
. = ALIGN(8);
#include <pubsub_events.h>
- . = NEXT(PAGE_SIZE);
+ . = ALIGN(PAGE_SIZE);
__RODATA_END__ = .;
} >RAM
#else
@@ -111,7 +111,7 @@
* executable. No RW data from the next section must creep in.
* Ensure the rest of the current memory page is unused.
*/
- . = NEXT(PAGE_SIZE);
+ . = ALIGN(PAGE_SIZE);
__RO_END__ = .;
} >RAM
#endif
@@ -131,7 +131,7 @@
spm_shim_exceptions : ALIGN(PAGE_SIZE) {
__SPM_SHIM_EXCEPTIONS_START__ = .;
*(.spm_shim_exceptions)
- . = NEXT(PAGE_SIZE);
+ . = ALIGN(PAGE_SIZE);
__SPM_SHIM_EXCEPTIONS_END__ = .;
} >RAM
#endif
@@ -246,7 +246,7 @@
* as device memory. No other unexpected data must creep in.
* Ensure the rest of the current memory page is unused.
*/
- . = NEXT(PAGE_SIZE);
+ . = ALIGN(PAGE_SIZE);
__COHERENT_RAM_END__ = .;
} >RAM
#endif
diff --git a/bl32/sp_min/sp_min.ld.S b/bl32/sp_min/sp_min.ld.S
index 71de883..ce6c954 100644
--- a/bl32/sp_min/sp_min.ld.S
+++ b/bl32/sp_min/sp_min.ld.S
@@ -28,10 +28,19 @@
*entrypoint.o(.text*)
*(.text*)
*(.vectors)
- . = NEXT(PAGE_SIZE);
+ . = ALIGN(PAGE_SIZE);
__TEXT_END__ = .;
} >RAM
+ /* .ARM.extab and .ARM.exidx are only added because Clang needs them */
+ .ARM.extab . : {
+ *(.ARM.extab* .gnu.linkonce.armextab.*)
+ } >RAM
+
+ .ARM.exidx . : {
+ *(.ARM.exidx* .gnu.linkonce.armexidx.*)
+ } >RAM
+
.rodata . : {
__RODATA_START__ = .;
*(.rodata*)
@@ -55,7 +64,7 @@
. = ALIGN(8);
#include <pubsub_events.h>
- . = NEXT(PAGE_SIZE);
+ . = ALIGN(PAGE_SIZE);
__RODATA_END__ = .;
} >RAM
#else
@@ -92,7 +101,7 @@
* read-only, executable. No RW data from the next section must
* creep in. Ensure the rest of the current memory block is unused.
*/
- . = NEXT(PAGE_SIZE);
+ . = ALIGN(PAGE_SIZE);
__RO_END__ = .;
} >RAM
#endif
@@ -207,7 +216,7 @@
* as device memory. No other unexpected data must creep in.
* Ensure the rest of the current memory page is unused.
*/
- . = NEXT(PAGE_SIZE);
+ . = ALIGN(PAGE_SIZE);
__COHERENT_RAM_END__ = .;
} >RAM
diff --git a/bl32/tsp/aarch64/tsp_exceptions.S b/bl32/tsp/aarch64/tsp_exceptions.S
index 4b2ad75..48e358a 100644
--- a/bl32/tsp/aarch64/tsp_exceptions.S
+++ b/bl32/tsp/aarch64/tsp_exceptions.S
@@ -82,19 +82,19 @@
*/
vector_entry sync_exception_sp_el0
b plat_panic_handler
- check_vector_size sync_exception_sp_el0
+end_vector_entry sync_exception_sp_el0
vector_entry irq_sp_el0
b plat_panic_handler
- check_vector_size irq_sp_el0
+end_vector_entry irq_sp_el0
vector_entry fiq_sp_el0
b plat_panic_handler
- check_vector_size fiq_sp_el0
+end_vector_entry fiq_sp_el0
vector_entry serror_sp_el0
b plat_panic_handler
- check_vector_size serror_sp_el0
+end_vector_entry serror_sp_el0
/* -----------------------------------------------------
@@ -104,19 +104,19 @@
*/
vector_entry sync_exception_sp_elx
b plat_panic_handler
- check_vector_size sync_exception_sp_elx
+end_vector_entry sync_exception_sp_elx
vector_entry irq_sp_elx
handle_tsp_interrupt irq_sp_elx
- check_vector_size irq_sp_elx
+end_vector_entry irq_sp_elx
vector_entry fiq_sp_elx
handle_tsp_interrupt fiq_sp_elx
- check_vector_size fiq_sp_elx
+end_vector_entry fiq_sp_elx
vector_entry serror_sp_elx
b plat_panic_handler
- check_vector_size serror_sp_elx
+end_vector_entry serror_sp_elx
/* -----------------------------------------------------
@@ -126,19 +126,19 @@
*/
vector_entry sync_exception_aarch64
b plat_panic_handler
- check_vector_size sync_exception_aarch64
+end_vector_entry sync_exception_aarch64
vector_entry irq_aarch64
b plat_panic_handler
- check_vector_size irq_aarch64
+end_vector_entry irq_aarch64
vector_entry fiq_aarch64
b plat_panic_handler
- check_vector_size fiq_aarch64
+end_vector_entry fiq_aarch64
vector_entry serror_aarch64
b plat_panic_handler
- check_vector_size serror_aarch64
+end_vector_entry serror_aarch64
/* -----------------------------------------------------
@@ -148,16 +148,16 @@
*/
vector_entry sync_exception_aarch32
b plat_panic_handler
- check_vector_size sync_exception_aarch32
+end_vector_entry sync_exception_aarch32
vector_entry irq_aarch32
b plat_panic_handler
- check_vector_size irq_aarch32
+end_vector_entry irq_aarch32
vector_entry fiq_aarch32
b plat_panic_handler
- check_vector_size fiq_aarch32
+end_vector_entry fiq_aarch32
vector_entry serror_aarch32
b plat_panic_handler
- check_vector_size serror_aarch32
+end_vector_entry serror_aarch32
diff --git a/bl32/tsp/tsp.ld.S b/bl32/tsp/tsp.ld.S
index 31c5a67..97b12ce 100644
--- a/bl32/tsp/tsp.ld.S
+++ b/bl32/tsp/tsp.ld.S
@@ -29,14 +29,14 @@
*tsp_entrypoint.o(.text*)
*(.text*)
*(.vectors)
- . = NEXT(PAGE_SIZE);
+ . = ALIGN(PAGE_SIZE);
__TEXT_END__ = .;
} >RAM
.rodata . : {
__RODATA_START__ = .;
*(.rodata*)
- . = NEXT(PAGE_SIZE);
+ . = ALIGN(PAGE_SIZE);
__RODATA_END__ = .;
} >RAM
#else
@@ -52,7 +52,7 @@
* read-only, executable. No RW data from the next section must
* creep in. Ensure the rest of the current memory page is unused.
*/
- . = NEXT(PAGE_SIZE);
+ . = ALIGN(PAGE_SIZE);
__RO_END__ = .;
} >RAM
#endif
@@ -117,7 +117,7 @@
* as device memory. No other unexpected data must creep in.
* Ensure the rest of the current memory page is unused.
*/
- . = NEXT(PAGE_SIZE);
+ . = ALIGN(PAGE_SIZE);
__COHERENT_RAM_END__ = .;
} >RAM
#endif
diff --git a/common/aarch64/early_exceptions.S b/common/aarch64/early_exceptions.S
index 19cc35d..ba94f6c 100644
--- a/common/aarch64/early_exceptions.S
+++ b/common/aarch64/early_exceptions.S
@@ -24,25 +24,25 @@
mov x0, #SYNC_EXCEPTION_SP_EL0
bl plat_report_exception
no_ret plat_panic_handler
- check_vector_size SynchronousExceptionSP0
+end_vector_entry SynchronousExceptionSP0
vector_entry IrqSP0
mov x0, #IRQ_SP_EL0
bl plat_report_exception
no_ret plat_panic_handler
- check_vector_size IrqSP0
+end_vector_entry IrqSP0
vector_entry FiqSP0
mov x0, #FIQ_SP_EL0
bl plat_report_exception
no_ret plat_panic_handler
- check_vector_size FiqSP0
+end_vector_entry FiqSP0
vector_entry SErrorSP0
mov x0, #SERROR_SP_EL0
bl plat_report_exception
no_ret plat_panic_handler
- check_vector_size SErrorSP0
+end_vector_entry SErrorSP0
/* -----------------------------------------------------
* Current EL with SPx: 0x200 - 0x400
@@ -52,25 +52,25 @@
mov x0, #SYNC_EXCEPTION_SP_ELX
bl plat_report_exception
no_ret plat_panic_handler
- check_vector_size SynchronousExceptionSPx
+end_vector_entry SynchronousExceptionSPx
vector_entry IrqSPx
mov x0, #IRQ_SP_ELX
bl plat_report_exception
no_ret plat_panic_handler
- check_vector_size IrqSPx
+end_vector_entry IrqSPx
vector_entry FiqSPx
mov x0, #FIQ_SP_ELX
bl plat_report_exception
no_ret plat_panic_handler
- check_vector_size FiqSPx
+end_vector_entry FiqSPx
vector_entry SErrorSPx
mov x0, #SERROR_SP_ELX
bl plat_report_exception
no_ret plat_panic_handler
- check_vector_size SErrorSPx
+end_vector_entry SErrorSPx
/* -----------------------------------------------------
* Lower EL using AArch64 : 0x400 - 0x600
@@ -80,25 +80,25 @@
mov x0, #SYNC_EXCEPTION_AARCH64
bl plat_report_exception
no_ret plat_panic_handler
- check_vector_size SynchronousExceptionA64
+end_vector_entry SynchronousExceptionA64
vector_entry IrqA64
mov x0, #IRQ_AARCH64
bl plat_report_exception
no_ret plat_panic_handler
- check_vector_size IrqA64
+end_vector_entry IrqA64
vector_entry FiqA64
mov x0, #FIQ_AARCH64
bl plat_report_exception
no_ret plat_panic_handler
- check_vector_size FiqA64
+end_vector_entry FiqA64
vector_entry SErrorA64
mov x0, #SERROR_AARCH64
bl plat_report_exception
no_ret plat_panic_handler
- check_vector_size SErrorA64
+end_vector_entry SErrorA64
/* -----------------------------------------------------
* Lower EL using AArch32 : 0x600 - 0x800
@@ -108,22 +108,22 @@
mov x0, #SYNC_EXCEPTION_AARCH32
bl plat_report_exception
no_ret plat_panic_handler
- check_vector_size SynchronousExceptionA32
+end_vector_entry SynchronousExceptionA32
vector_entry IrqA32
mov x0, #IRQ_AARCH32
bl plat_report_exception
no_ret plat_panic_handler
- check_vector_size IrqA32
+end_vector_entry IrqA32
vector_entry FiqA32
mov x0, #FIQ_AARCH32
bl plat_report_exception
no_ret plat_panic_handler
- check_vector_size FiqA32
+end_vector_entry FiqA32
vector_entry SErrorA32
mov x0, #SERROR_AARCH32
bl plat_report_exception
no_ret plat_panic_handler
- check_vector_size SErrorA32
+end_vector_entry SErrorA32
diff --git a/docs/user-guide.rst b/docs/user-guide.rst
index 087305d..da26026 100644
--- a/docs/user-guide.rst
+++ b/docs/user-guide.rst
@@ -62,8 +62,8 @@
guidance and a script, which can be used to download Linaro deliverables
automatically.
-Optionally, TF-A can be built using clang or Arm Compiler 6.
-See instructions below on how to switch the default compiler.
+Optionally, TF-A can be built using clang version 4.0 or newer, or Arm
+Compiler 6. See instructions below on how to switch the default compiler.
In addition, the following optional packages and tools may be needed:
@@ -103,10 +103,14 @@
export CROSS_COMPILE=<path-to-aarch32-gcc>/bin/arm-linux-gnueabihf-
- It is possible to build TF-A using clang or Arm Compiler 6. To do so
- ``CC`` needs to point to the clang or armclang binary. Only the compiler
- is switched; the assembler and linker need to be provided by the GNU
- toolchain, thus ``CROSS_COMPILE`` should be set as described above.
+ It is possible to build TF-A using Clang or Arm Compiler 6. To do so
+ ``CC`` needs to point to the clang or armclang binary, which will
+ also select the clang or armclang assembler. Be aware that the
+ GNU linker is used by default. If needed, the linker can be
+ overridden using the ``LD`` variable. Clang linker version 6 is
+ known to work with TF-A.
+
+ In both cases ``CROSS_COMPILE`` should be set as described above.
Arm Compiler 6 will be selected when the base name of the path assigned
to ``CC`` matches the string 'armclang'.
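As a concrete sketch of that documented flow (the placeholder paths, the ``fvp`` platform and the ``ld.lld`` override below are illustrative assumptions, not part of this patch):

    export CROSS_COMPILE=<path-to-aarch64-gcc>/bin/aarch64-linux-gnu-
    # Clang compiler and assembler; GNU linker taken from CROSS_COMPILE
    make CC=<path-to-clang>/bin/clang PLAT=fvp all
    # Same build, but overriding the linker explicitly
    make CC=<path-to-clang>/bin/clang LD=<path-to-clang>/bin/ld.lld PLAT=fvp all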
diff --git a/include/common/aarch64/asm_macros.S b/include/common/aarch64/asm_macros.S
index 5b05045..6e66ea9 100644
--- a/include/common/aarch64/asm_macros.S
+++ b/include/common/aarch64/asm_macros.S
@@ -83,23 +83,31 @@
.section \section_name, "ax"
.align 7, 0
.type \label, %function
- .func \label
.cfi_startproc
\label:
.endm
/*
+ * Add padding bytes to fill the exception vector up to its full size, which
+ * is always 32 instructions. If there are more than 32 instructions in the
+ * exception vector then an error is emitted.
+ */
+ .macro end_vector_entry label
+ .cfi_endproc
+ .fill \label + (32 * 4) - .
+ .endm
+
+ /*
* This macro verifies that the given vector doesn't exceed the
* architectural limit of 32 instructions. This is meant to be placed
* immediately after the last instruction in the vector. It takes the
* vector entry as the parameter
*/
.macro check_vector_size since
- .endfunc
- .cfi_endproc
- .if (. - \since) > (32 * 4)
- .error "Vector exceeds 32 instructions"
- .endif
+#if ERROR_DEPRECATED
+ .error "check_vector_size must not be used. Use end_vector_entry instead"
+#endif
+ end_vector_entry \since
.endm
#if ENABLE_PLAT_COMPAT
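A minimal usage sketch of the new macro (the entry name is hypothetical; plat_panic_handler is the handler used by the vectors changed in this patch):

	vector_entry example_serror_sp_el0
	b	plat_panic_handler
	end_vector_entry example_serror_sp_el0

With a single 4-byte instruction in the body, the macro's ".fill \label + (32 * 4) - ." evaluates to 128 - 4 = 124 bytes of padding; a body longer than 32 instructions makes the count negative and the assembler reports an error, which is the limit check_vector_size previously enforced with an explicit .error.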
diff --git a/include/common/asm_macros_common.S b/include/common/asm_macros_common.S
index ca8c1ad..081addc 100644
--- a/include/common/asm_macros_common.S
+++ b/include/common/asm_macros_common.S
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2013-2017, ARM Limited and Contributors. All rights reserved.
+ * Copyright (c) 2013-2018, ARM Limited and Contributors. All rights reserved.
*
* SPDX-License-Identifier: BSD-3-Clause
*/
@@ -31,7 +31,6 @@
.cfi_sections .debug_frame
.section .text.asm.\_name, "ax"
.type \_name, %function
- .func \_name
/*
* .cfi_startproc and .cfi_endproc are needed to output entries in
* .debug_frame
@@ -45,7 +44,6 @@
* This macro is used to mark the end of a function.
*/
.macro endfunc _name
- .endfunc
.cfi_endproc
.size \_name, . - \_name
.endm
diff --git a/include/lib/cpus/aarch32/cpu_macros.S b/include/lib/cpus/aarch32/cpu_macros.S
index 0f3a572..7703be3 100644
--- a/include/lib/cpus/aarch32/cpu_macros.S
+++ b/include/lib/cpus/aarch32/cpu_macros.S
@@ -35,38 +35,47 @@
# define REPORT_ERRATA 0
#endif
- /*
- * Define the offsets to the fields in cpu_ops structure.
- */
- .struct 0
-CPU_MIDR: /* cpu_ops midr */
- .space 4
-/* Reset fn is needed during reset */
-#if defined(IMAGE_AT_EL3)
-CPU_RESET_FUNC: /* cpu_ops reset_func */
- .space 4
+
+ .equ CPU_MIDR_SIZE, CPU_WORD_SIZE
+ .equ CPU_RESET_FUNC_SIZE, CPU_WORD_SIZE
+ .equ CPU_PWR_DWN_OPS_SIZE, CPU_WORD_SIZE * CPU_MAX_PWR_DWN_OPS
+ .equ CPU_ERRATA_FUNC_SIZE, CPU_WORD_SIZE
+ .equ CPU_ERRATA_LOCK_SIZE, CPU_WORD_SIZE
+ .equ CPU_ERRATA_PRINTED_SIZE, CPU_WORD_SIZE
+
+#ifndef IMAGE_AT_EL3
+ .equ CPU_RESET_FUNC_SIZE, 0
#endif
-#ifdef IMAGE_BL32 /* The power down core and cluster is needed only in BL32 */
-CPU_PWR_DWN_OPS: /* cpu_ops power down functions */
- .space (4 * CPU_MAX_PWR_DWN_OPS)
+
+/* The power down core and cluster is needed only in BL32 */
+#ifndef IMAGE_BL32
+ .equ CPU_PWR_DWN_OPS_SIZE, 0
#endif
-/*
- * Fields required to print errata status. Only in BL32 that the printing
- * require mutual exclusion and printed flag.
- */
-#if REPORT_ERRATA
-CPU_ERRATA_FUNC: /* CPU errata status printing function */
- .space 4
-#if defined(IMAGE_BL32)
-CPU_ERRATA_LOCK:
- .space 4
-CPU_ERRATA_PRINTED:
- .space 4
+/* Fields required to print errata status */
+#if !REPORT_ERRATA
+ .equ CPU_ERRATA_FUNC_SIZE, 0
#endif
+
+/* Only BL32 requires mutual exclusion and printed flag. */
+#if !(REPORT_ERRATA && defined(IMAGE_BL32))
+ .equ CPU_ERRATA_LOCK_SIZE, 0
+ .equ CPU_ERRATA_PRINTED_SIZE, 0
#endif
+
-CPU_OPS_SIZE = .
+/*
+ * Define the offsets to the fields in cpu_ops structure.
+ * Every offset is defined based on the offset and size of the previous
+ * field.
+ */
+ .equ CPU_MIDR, 0
+ .equ CPU_RESET_FUNC, CPU_MIDR + CPU_MIDR_SIZE
+ .equ CPU_PWR_DWN_OPS, CPU_RESET_FUNC + CPU_RESET_FUNC_SIZE
+ .equ CPU_ERRATA_FUNC, CPU_PWR_DWN_OPS + CPU_PWR_DWN_OPS_SIZE
+ .equ CPU_ERRATA_LOCK, CPU_ERRATA_FUNC + CPU_ERRATA_FUNC_SIZE
+ .equ CPU_ERRATA_PRINTED, CPU_ERRATA_LOCK + CPU_ERRATA_LOCK_SIZE
+ .equ CPU_OPS_SIZE, CPU_ERRATA_PRINTED + CPU_ERRATA_PRINTED_SIZE
/*
* Write given expressions as words
@@ -128,21 +137,8 @@
.word \_resetfunc
#endif
#ifdef IMAGE_BL32
-1:
/* Insert list of functions */
fill_constants CPU_MAX_PWR_DWN_OPS, \_power_down_ops
-2:
- /*
- * Error if no or more than CPU_MAX_PWR_DWN_OPS were specified in the
- * list
- */
- .ifeq 2b - 1b
- .error "At least one power down function must be specified"
- .else
- .iflt 2b - 1b - (CPU_MAX_PWR_DWN_OPS * CPU_WORD_SIZE)
- .error "More than CPU_MAX_PWR_DWN_OPS functions specified"
- .endif
- .endif
#endif
#if REPORT_ERRATA
diff --git a/include/lib/cpus/aarch64/cpu_macros.S b/include/lib/cpus/aarch64/cpu_macros.S
index cd8f3e8..026a48e 100644
--- a/include/lib/cpus/aarch64/cpu_macros.S
+++ b/include/lib/cpus/aarch64/cpu_macros.S
@@ -38,46 +38,56 @@
# define REPORT_ERRATA 0
#endif
- /*
- * Define the offsets to the fields in cpu_ops structure.
- */
- .struct 0
-CPU_MIDR: /* cpu_ops midr */
- .space 8
-/* Reset fn is needed in BL at reset vector */
-#if defined(IMAGE_AT_EL3)
-CPU_RESET_FUNC: /* cpu_ops reset_func */
- .space 8
+
+ .equ CPU_MIDR_SIZE, CPU_WORD_SIZE
+ .equ CPU_EXTRA1_FUNC_SIZE, CPU_WORD_SIZE
+ .equ CPU_EXTRA2_FUNC_SIZE, CPU_WORD_SIZE
+ .equ CPU_RESET_FUNC_SIZE, CPU_WORD_SIZE
+ .equ CPU_PWR_DWN_OPS_SIZE, CPU_WORD_SIZE * CPU_MAX_PWR_DWN_OPS
+ .equ CPU_ERRATA_FUNC_SIZE, CPU_WORD_SIZE
+ .equ CPU_ERRATA_LOCK_SIZE, CPU_WORD_SIZE
+ .equ CPU_ERRATA_PRINTED_SIZE, CPU_WORD_SIZE
+ .equ CPU_REG_DUMP_SIZE, CPU_WORD_SIZE
+
+#ifndef IMAGE_AT_EL3
+ .equ CPU_RESET_FUNC_SIZE, 0
#endif
-CPU_EXTRA1_FUNC:
- .space 8
-CPU_EXTRA2_FUNC:
- .space 8
-#ifdef IMAGE_BL31 /* The power down core and cluster is needed only in BL31 */
-CPU_PWR_DWN_OPS: /* cpu_ops power down functions */
- .space (8 * CPU_MAX_PWR_DWN_OPS)
+
+/* The power down core and cluster is needed only in BL31 */
+#ifndef IMAGE_BL31
+ .equ CPU_PWR_DWN_OPS_SIZE, 0
#endif
-/*
- * Fields required to print errata status. Only in BL31 that the printing
- * require mutual exclusion and printed flag.
- */
-#if REPORT_ERRATA
-CPU_ERRATA_FUNC:
- .space 8
-#if defined(IMAGE_BL31)
-CPU_ERRATA_LOCK:
- .space 8
-CPU_ERRATA_PRINTED:
- .space 8
+/* Fields required to print errata status. */
+#if !REPORT_ERRATA
+ .equ CPU_ERRATA_FUNC_SIZE, 0
#endif
+
+/* Only BL31 requires mutual exclusion and printed flag. */
+#if !(REPORT_ERRATA && defined(IMAGE_BL31))
+ .equ CPU_ERRATA_LOCK_SIZE, 0
+ .equ CPU_ERRATA_PRINTED_SIZE, 0
#endif
-#if defined(IMAGE_BL31) && CRASH_REPORTING
-CPU_REG_DUMP: /* cpu specific register dump for crash reporting */
- .space 8
+#if !defined(IMAGE_BL31) || !CRASH_REPORTING
+ .equ CPU_REG_DUMP_SIZE, 0
#endif
-CPU_OPS_SIZE = .
+
+/*
+ * Define the offsets to the fields in cpu_ops structure.
+ * Every offset is defined based on the offset and size of the previous
+ * field.
+ */
+ .equ CPU_MIDR, 0
+ .equ CPU_RESET_FUNC, CPU_MIDR + CPU_MIDR_SIZE
+ .equ CPU_EXTRA1_FUNC, CPU_RESET_FUNC + CPU_RESET_FUNC_SIZE
+ .equ CPU_EXTRA2_FUNC, CPU_EXTRA1_FUNC + CPU_EXTRA1_FUNC_SIZE
+ .equ CPU_PWR_DWN_OPS, CPU_EXTRA2_FUNC + CPU_EXTRA2_FUNC_SIZE
+ .equ CPU_ERRATA_FUNC, CPU_PWR_DWN_OPS + CPU_PWR_DWN_OPS_SIZE
+ .equ CPU_ERRATA_LOCK, CPU_ERRATA_FUNC + CPU_ERRATA_FUNC_SIZE
+ .equ CPU_ERRATA_PRINTED, CPU_ERRATA_LOCK + CPU_ERRATA_LOCK_SIZE
+ .equ CPU_REG_DUMP, CPU_ERRATA_PRINTED + CPU_ERRATA_PRINTED_SIZE
+ .equ CPU_OPS_SIZE, CPU_REG_DUMP + CPU_REG_DUMP_SIZE
/*
* Write given expressions as quad words
@@ -149,21 +159,8 @@
.quad \_extra1
.quad \_extra2
#ifdef IMAGE_BL31
-1:
/* Insert list of functions */
fill_constants CPU_MAX_PWR_DWN_OPS, \_power_down_ops
-2:
- /*
- * Error if no or more than CPU_MAX_PWR_DWN_OPS were specified in the
- * list
- */
- .ifeq 2b - 1b
- .error "At least one power down function must be specified"
- .else
- .iflt 2b - 1b - (CPU_MAX_PWR_DWN_OPS * CPU_WORD_SIZE)
- .error "More than CPU_MAX_PWR_DWN_OPS functions specified"
- .endif
- .endif
#endif
#if REPORT_ERRATA
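To illustrate the .equ offset chain introduced above, here is how it resolves for an assumed configuration with IMAGE_AT_EL3, IMAGE_BL31 and CRASH_REPORTING=1 defined and REPORT_ERRATA=0 (values chosen only for this sketch; CPU_WORD_SIZE is 8, matching the old .space 8 entries):

	/*
	 *   CPU_MIDR           = 0
	 *   CPU_RESET_FUNC     = 8
	 *   CPU_EXTRA1_FUNC    = 16
	 *   CPU_EXTRA2_FUNC    = 24
	 *   CPU_PWR_DWN_OPS    = 32
	 *   CPU_ERRATA_FUNC    = 32 + 8 * CPU_MAX_PWR_DWN_OPS
	 *   CPU_ERRATA_LOCK    = CPU_ERRATA_FUNC         (errata sizes collapse to 0)
	 *   CPU_ERRATA_PRINTED = CPU_ERRATA_FUNC
	 *   CPU_REG_DUMP       = CPU_ERRATA_FUNC
	 *   CPU_OPS_SIZE       = CPU_REG_DUMP + 8        (register dump hook present)
	 */

Fields that are compiled out contribute zero bytes, so each build configuration gets the same layout the old .struct block produced, but expressed purely with .equ arithmetic, which both assemblers accept.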
diff --git a/include/plat/arm/common/aarch64/arm_macros.S b/include/plat/arm/common/aarch64/arm_macros.S
index 12bf734..7953d7e 100644
--- a/include/plat/arm/common/aarch64/arm_macros.S
+++ b/include/plat/arm/common/aarch64/arm_macros.S
@@ -22,8 +22,7 @@
/* Registers common to both GICv2 and GICv3 */
gicd_pend_reg:
- .asciz "gicd_ispendr regs (Offsets 0x200 - 0x278)\n" \
- " Offset:\t\t\tvalue\n"
+ .asciz "gicd_ispendr regs (Offsets 0x200 - 0x278)\n Offset:\t\t\tvalue\n"
newline:
.asciz "\n"
spacer:
diff --git a/include/plat/arm/common/arm_common.ld.S b/include/plat/arm/common/arm_common.ld.S
index 6edfa09..3f6e29b 100644
--- a/include/plat/arm/common/arm_common.ld.S
+++ b/include/plat/arm/common/arm_common.ld.S
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2017, ARM Limited and Contributors. All rights reserved.
+ * Copyright (c) 2017-2018, ARM Limited and Contributors. All rights reserved.
*
* SPDX-License-Identifier: BSD-3-Clause
*/
@@ -22,7 +22,7 @@
*(arm_el3_tzc_dram)
__EL3_SEC_DRAM_UNALIGNED_END__ = .;
- . = NEXT(PAGE_SIZE);
+ . = ALIGN(PAGE_SIZE);
__EL3_SEC_DRAM_END__ = .;
} >EL3_SEC_DRAM
}
diff --git a/lib/cpus/aarch64/cortex_a76.S b/lib/cpus/aarch64/cortex_a76.S
index 14705d7..51d0b15 100644
--- a/lib/cpus/aarch64/cortex_a76.S
+++ b/lib/cpus/aarch64/cortex_a76.S
@@ -107,19 +107,19 @@
*/
vector_entry cortex_a76_sync_exception_sp_el0
b sync_exception_sp_el0
- check_vector_size cortex_a76_sync_exception_sp_el0
+end_vector_entry cortex_a76_sync_exception_sp_el0
vector_entry cortex_a76_irq_sp_el0
b irq_sp_el0
- check_vector_size cortex_a76_irq_sp_el0
+end_vector_entry cortex_a76_irq_sp_el0
vector_entry cortex_a76_fiq_sp_el0
b fiq_sp_el0
- check_vector_size cortex_a76_fiq_sp_el0
+end_vector_entry cortex_a76_fiq_sp_el0
vector_entry cortex_a76_serror_sp_el0
b serror_sp_el0
- check_vector_size cortex_a76_serror_sp_el0
+end_vector_entry cortex_a76_serror_sp_el0
/* ---------------------------------------------------------------------
* Current EL with SP_ELx: 0x200 - 0x400
@@ -127,19 +127,19 @@
*/
vector_entry cortex_a76_sync_exception_sp_elx
b sync_exception_sp_elx
- check_vector_size cortex_a76_sync_exception_sp_elx
+end_vector_entry cortex_a76_sync_exception_sp_elx
vector_entry cortex_a76_irq_sp_elx
b irq_sp_elx
- check_vector_size cortex_a76_irq_sp_elx
+end_vector_entry cortex_a76_irq_sp_elx
vector_entry cortex_a76_fiq_sp_elx
b fiq_sp_elx
- check_vector_size cortex_a76_fiq_sp_elx
+end_vector_entry cortex_a76_fiq_sp_elx
vector_entry cortex_a76_serror_sp_elx
b serror_sp_elx
- check_vector_size cortex_a76_serror_sp_elx
+end_vector_entry cortex_a76_serror_sp_elx
/* ---------------------------------------------------------------------
* Lower EL using AArch64 : 0x400 - 0x600
@@ -148,22 +148,22 @@
vector_entry cortex_a76_sync_exception_aarch64
apply_cve_2018_3639_wa _is_sync_exception=1 _esr_el3_val=ESR_EL3_A64_SMC0
b sync_exception_aarch64
- check_vector_size cortex_a76_sync_exception_aarch64
+end_vector_entry cortex_a76_sync_exception_aarch64
vector_entry cortex_a76_irq_aarch64
apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A64_SMC0
b irq_aarch64
- check_vector_size cortex_a76_irq_aarch64
+end_vector_entry cortex_a76_irq_aarch64
vector_entry cortex_a76_fiq_aarch64
apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A64_SMC0
b fiq_aarch64
- check_vector_size cortex_a76_fiq_aarch64
+end_vector_entry cortex_a76_fiq_aarch64
vector_entry cortex_a76_serror_aarch64
apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A64_SMC0
b serror_aarch64
- check_vector_size cortex_a76_serror_aarch64
+end_vector_entry cortex_a76_serror_aarch64
/* ---------------------------------------------------------------------
* Lower EL using AArch32 : 0x600 - 0x800
@@ -172,22 +172,22 @@
vector_entry cortex_a76_sync_exception_aarch32
apply_cve_2018_3639_wa _is_sync_exception=1 _esr_el3_val=ESR_EL3_A32_SMC0
b sync_exception_aarch32
- check_vector_size cortex_a76_sync_exception_aarch32
+end_vector_entry cortex_a76_sync_exception_aarch32
vector_entry cortex_a76_irq_aarch32
apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A32_SMC0
b irq_aarch32
- check_vector_size cortex_a76_irq_aarch32
+end_vector_entry cortex_a76_irq_aarch32
vector_entry cortex_a76_fiq_aarch32
apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A32_SMC0
b fiq_aarch32
- check_vector_size cortex_a76_fiq_aarch32
+end_vector_entry cortex_a76_fiq_aarch32
vector_entry cortex_a76_serror_aarch32
apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A32_SMC0
b serror_aarch32
- check_vector_size cortex_a76_serror_aarch32
+end_vector_entry cortex_a76_serror_aarch32
func check_errata_cve_2018_3639
#if WORKAROUND_CVE_2018_3639
diff --git a/lib/cpus/aarch64/denver.S b/lib/cpus/aarch64/denver.S
index aee4fee..f04dbd6 100644
--- a/lib/cpus/aarch64/denver.S
+++ b/lib/cpus/aarch64/denver.S
@@ -55,19 +55,19 @@
*/
vector_entry workaround_bpflush_sync_exception_sp_el0
b sync_exception_sp_el0
- check_vector_size workaround_bpflush_sync_exception_sp_el0
+end_vector_entry workaround_bpflush_sync_exception_sp_el0
vector_entry workaround_bpflush_irq_sp_el0
b irq_sp_el0
- check_vector_size workaround_bpflush_irq_sp_el0
+end_vector_entry workaround_bpflush_irq_sp_el0
vector_entry workaround_bpflush_fiq_sp_el0
b fiq_sp_el0
- check_vector_size workaround_bpflush_fiq_sp_el0
+end_vector_entry workaround_bpflush_fiq_sp_el0
vector_entry workaround_bpflush_serror_sp_el0
b serror_sp_el0
- check_vector_size workaround_bpflush_serror_sp_el0
+end_vector_entry workaround_bpflush_serror_sp_el0
/* ---------------------------------------------------------------------
* Current EL with SP_ELx: 0x200 - 0x400
@@ -75,19 +75,19 @@
*/
vector_entry workaround_bpflush_sync_exception_sp_elx
b sync_exception_sp_elx
- check_vector_size workaround_bpflush_sync_exception_sp_elx
+end_vector_entry workaround_bpflush_sync_exception_sp_elx
vector_entry workaround_bpflush_irq_sp_elx
b irq_sp_elx
- check_vector_size workaround_bpflush_irq_sp_elx
+end_vector_entry workaround_bpflush_irq_sp_elx
vector_entry workaround_bpflush_fiq_sp_elx
b fiq_sp_elx
- check_vector_size workaround_bpflush_fiq_sp_elx
+end_vector_entry workaround_bpflush_fiq_sp_elx
vector_entry workaround_bpflush_serror_sp_elx
b serror_sp_elx
- check_vector_size workaround_bpflush_serror_sp_elx
+end_vector_entry workaround_bpflush_serror_sp_elx
/* ---------------------------------------------------------------------
* Lower EL using AArch64 : 0x400 - 0x600
@@ -96,22 +96,22 @@
vector_entry workaround_bpflush_sync_exception_aarch64
apply_workaround
b sync_exception_aarch64
- check_vector_size workaround_bpflush_sync_exception_aarch64
+end_vector_entry workaround_bpflush_sync_exception_aarch64
vector_entry workaround_bpflush_irq_aarch64
apply_workaround
b irq_aarch64
- check_vector_size workaround_bpflush_irq_aarch64
+end_vector_entry workaround_bpflush_irq_aarch64
vector_entry workaround_bpflush_fiq_aarch64
apply_workaround
b fiq_aarch64
- check_vector_size workaround_bpflush_fiq_aarch64
+end_vector_entry workaround_bpflush_fiq_aarch64
vector_entry workaround_bpflush_serror_aarch64
apply_workaround
b serror_aarch64
- check_vector_size workaround_bpflush_serror_aarch64
+end_vector_entry workaround_bpflush_serror_aarch64
/* ---------------------------------------------------------------------
* Lower EL using AArch32 : 0x600 - 0x800
@@ -120,22 +120,22 @@
vector_entry workaround_bpflush_sync_exception_aarch32
apply_workaround
b sync_exception_aarch32
- check_vector_size workaround_bpflush_sync_exception_aarch32
+end_vector_entry workaround_bpflush_sync_exception_aarch32
vector_entry workaround_bpflush_irq_aarch32
apply_workaround
b irq_aarch32
- check_vector_size workaround_bpflush_irq_aarch32
+end_vector_entry workaround_bpflush_irq_aarch32
vector_entry workaround_bpflush_fiq_aarch32
apply_workaround
b fiq_aarch32
- check_vector_size workaround_bpflush_fiq_aarch32
+end_vector_entry workaround_bpflush_fiq_aarch32
vector_entry workaround_bpflush_serror_aarch32
apply_workaround
b serror_aarch32
- check_vector_size workaround_bpflush_serror_aarch32
+end_vector_entry workaround_bpflush_serror_aarch32
.global denver_disable_dco
diff --git a/lib/cpus/aarch64/wa_cve_2017_5715_bpiall.S b/lib/cpus/aarch64/wa_cve_2017_5715_bpiall.S
index 8437155..c613ebd 100644
--- a/lib/cpus/aarch64/wa_cve_2017_5715_bpiall.S
+++ b/lib/cpus/aarch64/wa_cve_2017_5715_bpiall.S
@@ -114,19 +114,19 @@
.word EMIT_BPIALL
.word EMIT_SMC
- check_vector_size bpiall_sync_exception_sp_el0
+end_vector_entry bpiall_sync_exception_sp_el0
vector_entry bpiall_irq_sp_el0
b irq_sp_el0
- check_vector_size bpiall_irq_sp_el0
+end_vector_entry bpiall_irq_sp_el0
vector_entry bpiall_fiq_sp_el0
b fiq_sp_el0
- check_vector_size bpiall_fiq_sp_el0
+end_vector_entry bpiall_fiq_sp_el0
vector_entry bpiall_serror_sp_el0
b serror_sp_el0
- check_vector_size bpiall_serror_sp_el0
+end_vector_entry bpiall_serror_sp_el0
/* ---------------------------------------------------------------------
* Current EL with SP_ELx: 0x200 - 0x400
@@ -134,19 +134,19 @@
*/
vector_entry bpiall_sync_exception_sp_elx
b sync_exception_sp_elx
- check_vector_size bpiall_sync_exception_sp_elx
+end_vector_entry bpiall_sync_exception_sp_elx
vector_entry bpiall_irq_sp_elx
b irq_sp_elx
- check_vector_size bpiall_irq_sp_elx
+end_vector_entry bpiall_irq_sp_elx
vector_entry bpiall_fiq_sp_elx
b fiq_sp_elx
- check_vector_size bpiall_fiq_sp_elx
+end_vector_entry bpiall_fiq_sp_elx
vector_entry bpiall_serror_sp_elx
b serror_sp_elx
- check_vector_size bpiall_serror_sp_elx
+end_vector_entry bpiall_serror_sp_elx
/* ---------------------------------------------------------------------
* Lower EL using AArch64 : 0x400 - 0x600
@@ -154,19 +154,19 @@
*/
vector_entry bpiall_sync_exception_aarch64
apply_cve_2017_5715_wa 1
- check_vector_size bpiall_sync_exception_aarch64
+end_vector_entry bpiall_sync_exception_aarch64
vector_entry bpiall_irq_aarch64
apply_cve_2017_5715_wa 2
- check_vector_size bpiall_irq_aarch64
+end_vector_entry bpiall_irq_aarch64
vector_entry bpiall_fiq_aarch64
apply_cve_2017_5715_wa 4
- check_vector_size bpiall_fiq_aarch64
+end_vector_entry bpiall_fiq_aarch64
vector_entry bpiall_serror_aarch64
apply_cve_2017_5715_wa 8
- check_vector_size bpiall_serror_aarch64
+end_vector_entry bpiall_serror_aarch64
/* ---------------------------------------------------------------------
* Lower EL using AArch32 : 0x600 - 0x800
@@ -174,19 +174,19 @@
*/
vector_entry bpiall_sync_exception_aarch32
apply_cve_2017_5715_wa 1
- check_vector_size bpiall_sync_exception_aarch32
+end_vector_entry bpiall_sync_exception_aarch32
vector_entry bpiall_irq_aarch32
apply_cve_2017_5715_wa 2
- check_vector_size bpiall_irq_aarch32
+end_vector_entry bpiall_irq_aarch32
vector_entry bpiall_fiq_aarch32
apply_cve_2017_5715_wa 4
- check_vector_size bpiall_fiq_aarch32
+end_vector_entry bpiall_fiq_aarch32
vector_entry bpiall_serror_aarch32
apply_cve_2017_5715_wa 8
- check_vector_size bpiall_serror_aarch32
+end_vector_entry bpiall_serror_aarch32
/* ---------------------------------------------------------------------
* This vector table is used while the workaround is executing. It
@@ -203,19 +203,19 @@
*/
vector_entry bpiall_ret_sync_exception_sp_el0
b report_unhandled_exception
- check_vector_size bpiall_ret_sync_exception_sp_el0
+end_vector_entry bpiall_ret_sync_exception_sp_el0
vector_entry bpiall_ret_irq_sp_el0
b report_unhandled_interrupt
- check_vector_size bpiall_ret_irq_sp_el0
+end_vector_entry bpiall_ret_irq_sp_el0
vector_entry bpiall_ret_fiq_sp_el0
b report_unhandled_interrupt
- check_vector_size bpiall_ret_fiq_sp_el0
+end_vector_entry bpiall_ret_fiq_sp_el0
vector_entry bpiall_ret_serror_sp_el0
b report_unhandled_exception
- check_vector_size bpiall_ret_serror_sp_el0
+end_vector_entry bpiall_ret_serror_sp_el0
/* ---------------------------------------------------------------------
* Current EL with SP_ELx: 0x200 - 0x400 (UNUSED)
@@ -223,19 +223,19 @@
*/
vector_entry bpiall_ret_sync_exception_sp_elx
b report_unhandled_exception
- check_vector_size bpiall_ret_sync_exception_sp_elx
+end_vector_entry bpiall_ret_sync_exception_sp_elx
vector_entry bpiall_ret_irq_sp_elx
b report_unhandled_interrupt
- check_vector_size bpiall_ret_irq_sp_elx
+end_vector_entry bpiall_ret_irq_sp_elx
vector_entry bpiall_ret_fiq_sp_elx
b report_unhandled_interrupt
- check_vector_size bpiall_ret_fiq_sp_elx
+end_vector_entry bpiall_ret_fiq_sp_elx
vector_entry bpiall_ret_serror_sp_elx
b report_unhandled_exception
- check_vector_size bpiall_ret_serror_sp_elx
+end_vector_entry bpiall_ret_serror_sp_elx
/* ---------------------------------------------------------------------
* Lower EL using AArch64 : 0x400 - 0x600 (UNUSED)
@@ -243,19 +243,19 @@
*/
vector_entry bpiall_ret_sync_exception_aarch64
b report_unhandled_exception
- check_vector_size bpiall_ret_sync_exception_aarch64
+end_vector_entry bpiall_ret_sync_exception_aarch64
vector_entry bpiall_ret_irq_aarch64
b report_unhandled_interrupt
- check_vector_size bpiall_ret_irq_aarch64
+end_vector_entry bpiall_ret_irq_aarch64
vector_entry bpiall_ret_fiq_aarch64
b report_unhandled_interrupt
- check_vector_size bpiall_ret_fiq_aarch64
+end_vector_entry bpiall_ret_fiq_aarch64
vector_entry bpiall_ret_serror_aarch64
b report_unhandled_exception
- check_vector_size bpiall_ret_serror_aarch64
+end_vector_entry bpiall_ret_serror_aarch64
/* ---------------------------------------------------------------------
* Lower EL using AArch32 : 0x600 - 0x800
@@ -324,7 +324,7 @@
1:
ldp x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
b sync_exception_aarch64
- check_vector_size bpiall_ret_sync_exception_aarch32
+end_vector_entry bpiall_ret_sync_exception_aarch32
vector_entry bpiall_ret_irq_aarch32
b report_unhandled_interrupt
@@ -346,12 +346,12 @@
bpiall_ret_serror:
ldp x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
b serror_aarch64
- check_vector_size bpiall_ret_irq_aarch32
+end_vector_entry bpiall_ret_irq_aarch32
vector_entry bpiall_ret_fiq_aarch32
b report_unhandled_interrupt
- check_vector_size bpiall_ret_fiq_aarch32
+end_vector_entry bpiall_ret_fiq_aarch32
vector_entry bpiall_ret_serror_aarch32
b report_unhandled_exception
- check_vector_size bpiall_ret_serror_aarch32
+end_vector_entry bpiall_ret_serror_aarch32
diff --git a/lib/cpus/aarch64/wa_cve_2017_5715_mmu.S b/lib/cpus/aarch64/wa_cve_2017_5715_mmu.S
index a556d1f..d7b6e26 100644
--- a/lib/cpus/aarch64/wa_cve_2017_5715_mmu.S
+++ b/lib/cpus/aarch64/wa_cve_2017_5715_mmu.S
@@ -66,19 +66,19 @@
*/
vector_entry mmu_sync_exception_sp_el0
b sync_exception_sp_el0
- check_vector_size mmu_sync_exception_sp_el0
+end_vector_entry mmu_sync_exception_sp_el0
vector_entry mmu_irq_sp_el0
b irq_sp_el0
- check_vector_size mmu_irq_sp_el0
+end_vector_entry mmu_irq_sp_el0
vector_entry mmu_fiq_sp_el0
b fiq_sp_el0
- check_vector_size mmu_fiq_sp_el0
+end_vector_entry mmu_fiq_sp_el0
vector_entry mmu_serror_sp_el0
b serror_sp_el0
- check_vector_size mmu_serror_sp_el0
+end_vector_entry mmu_serror_sp_el0
/* ---------------------------------------------------------------------
* Current EL with SP_ELx: 0x200 - 0x400
@@ -86,19 +86,19 @@
*/
vector_entry mmu_sync_exception_sp_elx
b sync_exception_sp_elx
- check_vector_size mmu_sync_exception_sp_elx
+end_vector_entry mmu_sync_exception_sp_elx
vector_entry mmu_irq_sp_elx
b irq_sp_elx
- check_vector_size mmu_irq_sp_elx
+end_vector_entry mmu_irq_sp_elx
vector_entry mmu_fiq_sp_elx
b fiq_sp_elx
- check_vector_size mmu_fiq_sp_elx
+end_vector_entry mmu_fiq_sp_elx
vector_entry mmu_serror_sp_elx
b serror_sp_elx
- check_vector_size mmu_serror_sp_elx
+end_vector_entry mmu_serror_sp_elx
/* ---------------------------------------------------------------------
* Lower EL using AArch64 : 0x400 - 0x600
@@ -107,22 +107,22 @@
vector_entry mmu_sync_exception_aarch64
apply_cve_2017_5715_wa _is_sync_exception=1 _esr_el3_val=ESR_EL3_A64_SMC0
b sync_exception_aarch64
- check_vector_size mmu_sync_exception_aarch64
+end_vector_entry mmu_sync_exception_aarch64
vector_entry mmu_irq_aarch64
apply_cve_2017_5715_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A64_SMC0
b irq_aarch64
- check_vector_size mmu_irq_aarch64
+end_vector_entry mmu_irq_aarch64
vector_entry mmu_fiq_aarch64
apply_cve_2017_5715_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A64_SMC0
b fiq_aarch64
- check_vector_size mmu_fiq_aarch64
+end_vector_entry mmu_fiq_aarch64
vector_entry mmu_serror_aarch64
apply_cve_2017_5715_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A64_SMC0
b serror_aarch64
- check_vector_size mmu_serror_aarch64
+end_vector_entry mmu_serror_aarch64
/* ---------------------------------------------------------------------
* Lower EL using AArch32 : 0x600 - 0x800
@@ -131,19 +131,19 @@
vector_entry mmu_sync_exception_aarch32
apply_cve_2017_5715_wa _is_sync_exception=1 _esr_el3_val=ESR_EL3_A32_SMC0
b sync_exception_aarch32
- check_vector_size mmu_sync_exception_aarch32
+end_vector_entry mmu_sync_exception_aarch32
vector_entry mmu_irq_aarch32
apply_cve_2017_5715_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A32_SMC0
b irq_aarch32
- check_vector_size mmu_irq_aarch32
+end_vector_entry mmu_irq_aarch32
vector_entry mmu_fiq_aarch32
apply_cve_2017_5715_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A32_SMC0
b fiq_aarch32
- check_vector_size mmu_fiq_aarch32
+end_vector_entry mmu_fiq_aarch32
vector_entry mmu_serror_aarch32
apply_cve_2017_5715_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A32_SMC0
b serror_aarch32
- check_vector_size mmu_serror_aarch32
+end_vector_entry mmu_serror_aarch32
diff --git a/plat/mediatek/mt6795/bl31.ld.S b/plat/mediatek/mt6795/bl31.ld.S
index 0fbd3f7..8f391df 100644
--- a/plat/mediatek/mt6795/bl31.ld.S
+++ b/plat/mediatek/mt6795/bl31.ld.S
@@ -59,7 +59,7 @@
* executable. No RW data from the next section must creep in.
* Ensure the rest of the current memory page is unused.
*/
- . = NEXT(PAGE_SIZE);
+ . = ALIGN(PAGE_SIZE);
__RO_END__ = .;
} >RAM
@@ -161,7 +161,7 @@
* as device memory. No other unexpected data must creep in.
* Ensure the rest of the current memory page is unused.
*/
- . = NEXT(PAGE_SIZE);
+ . = ALIGN(PAGE_SIZE);
__COHERENT_RAM_END__ = .;
} >RAM2
#endif
diff --git a/plat/rockchip/common/pmusram/pmu_sram_cpus_on.S b/plat/rockchip/common/pmusram/pmu_sram_cpus_on.S
index 5a1854b..991fe6c 100644
--- a/plat/rockchip/common/pmusram/pmu_sram_cpus_on.S
+++ b/plat/rockchip/common/pmusram/pmu_sram_cpus_on.S
@@ -12,7 +12,6 @@
.macro pmusram_entry_func _name
.section .pmusram.entry, "ax"
.type \_name, %function
- .func \_name
.cfi_startproc
\_name:
.endm
diff --git a/plat/rockchip/rk3399/drivers/pmu/plat_pmu_macros.S b/plat/rockchip/rk3399/drivers/pmu/plat_pmu_macros.S
index 70fd9bf..546c09a 100644
--- a/plat/rockchip/rk3399/drivers/pmu/plat_pmu_macros.S
+++ b/plat/rockchip/rk3399/drivers/pmu/plat_pmu_macros.S
@@ -15,7 +15,6 @@
.cfi_sections .debug_frame
.section .sram.text, "ax"
.type \_name, %function
- .func \_name
.cfi_startproc
\_name:
.endm
diff --git a/services/std_svc/spm/aarch64/spm_shim_exceptions.S b/services/std_svc/spm/aarch64/spm_shim_exceptions.S
index 218245d..9c218df 100644
--- a/services/std_svc/spm/aarch64/spm_shim_exceptions.S
+++ b/services/std_svc/spm/aarch64/spm_shim_exceptions.S
@@ -23,19 +23,19 @@
*/
vector_entry SynchronousExceptionSP0, .spm_shim_exceptions
b .
- check_vector_size SynchronousExceptionSP0
+end_vector_entry SynchronousExceptionSP0
vector_entry IrqSP0, .spm_shim_exceptions
b .
- check_vector_size IrqSP0
+end_vector_entry IrqSP0
vector_entry FiqSP0, .spm_shim_exceptions
b .
- check_vector_size FiqSP0
+end_vector_entry FiqSP0
vector_entry SErrorSP0, .spm_shim_exceptions
b .
- check_vector_size SErrorSP0
+end_vector_entry SErrorSP0
/* -----------------------------------------------------
* Current EL with SPx: 0x200 - 0x400
@@ -43,19 +43,19 @@
*/
vector_entry SynchronousExceptionSPx, .spm_shim_exceptions
b .
- check_vector_size SynchronousExceptionSPx
+end_vector_entry SynchronousExceptionSPx
vector_entry IrqSPx, .spm_shim_exceptions
b .
- check_vector_size IrqSPx
+end_vector_entry IrqSPx
vector_entry FiqSPx, .spm_shim_exceptions
b .
- check_vector_size FiqSPx
+end_vector_entry FiqSPx
vector_entry SErrorSPx, .spm_shim_exceptions
b .
- check_vector_size SErrorSPx
+end_vector_entry SErrorSPx
/* -----------------------------------------------------
* Lower EL using AArch64 : 0x400 - 0x600. No exceptions
@@ -93,19 +93,19 @@
handle_sys_trap:
panic:
b panic
- check_vector_size SynchronousExceptionA64
+end_vector_entry SynchronousExceptionA64
vector_entry IrqA64, .spm_shim_exceptions
b .
- check_vector_size IrqA64
+end_vector_entry IrqA64
vector_entry FiqA64, .spm_shim_exceptions
b .
- check_vector_size FiqA64
+end_vector_entry FiqA64
vector_entry SErrorA64, .spm_shim_exceptions
b .
- check_vector_size SErrorA64
+end_vector_entry SErrorA64
/* -----------------------------------------------------
* Lower EL using AArch32 : 0x600 - 0x800
@@ -113,16 +113,16 @@
*/
vector_entry SynchronousExceptionA32, .spm_shim_exceptions
b .
- check_vector_size SynchronousExceptionA32
+end_vector_entry SynchronousExceptionA32
vector_entry IrqA32, .spm_shim_exceptions
b .
- check_vector_size IrqA32
+end_vector_entry IrqA32
vector_entry FiqA32, .spm_shim_exceptions
b .
- check_vector_size FiqA32
+end_vector_entry FiqA32
vector_entry SErrorA32, .spm_shim_exceptions
b .
- check_vector_size SErrorA32
+end_vector_entry SErrorA32