Remove build option `ASM_ASSERTION`

The build option `ENABLE_ASSERTIONS` should be used instead. That way
both C and ASM assertions can be enabled or disabled together.
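
As an illustration, this is the guard pattern the patch applies
throughout (a minimal sketch, reusing the `ldcopr`/`ASM_ASSERT`
macros and the `SCTLR_C_BIT` flag exactly as they appear in the
AArch32 hunks below):

    /* Assert that the data cache is disabled; the whole block
     * is compiled out when ENABLE_ASSERTIONS=0. */
    #if ENABLE_ASSERTIONS
        ldcopr  r0, SCTLR
        tst     r0, #SCTLR_C_BIT
        ASM_ASSERT(eq)
    #endif /* ENABLE_ASSERTIONS */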

All occurrences of `ASM_ASSERTION` in common code and ARM platforms have
been replaced by `ENABLE_ASSERTIONS`.

`ASM_ASSERTION` has been removed from the user guide.
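
As before, the flag is set on the make command line when building;
for example (assuming the FVP platform here):

    make PLAT=fvp ENABLE_ASSERTIONS=1 all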

Change-Id: I51f1991f11b9b7ff83e787c9a3270c274748ec6f
Signed-off-by: Antonio Nino Diaz <antonio.ninodiaz@arm.com>
diff --git a/lib/aarch32/misc_helpers.S b/lib/aarch32/misc_helpers.S
index 5b17c21..03b47ea 100644
--- a/lib/aarch32/misc_helpers.S
+++ b/lib/aarch32/misc_helpers.S
@@ -162,7 +162,7 @@
  * --------------------------------------------------------------------------
  */
 func memcpy4
-#if ASM_ASSERTION
+#if ENABLE_ASSERTIONS
 	orr	r3, r0, r1
 	tst	r3, #0x3
 	ASM_ASSERT(eq)
diff --git a/lib/aarch64/misc_helpers.S b/lib/aarch64/misc_helpers.S
index 84265e0..74550aa 100644
--- a/lib/aarch64/misc_helpers.S
+++ b/lib/aarch64/misc_helpers.S
@@ -215,7 +215,7 @@
 	tmp1         .req x4
 	tmp2         .req x5
 
-#if ASM_ASSERTION
+#if ENABLE_ASSERTIONS
 	/*
 	 * Check for M bit (MMU enabled) of the current SCTLR_EL(1|3)
 	 * register value and panic if the MMU is disabled.
@@ -228,7 +228,7 @@
 
 	tst	tmp1, #SCTLR_M_BIT
 	ASM_ASSERT(ne)
-#endif /* ASM_ASSERTION */
+#endif /* ENABLE_ASSERTIONS */
 
 	/* stop_address is the address past the last to zero */
 	add	stop_address, cursor, length
@@ -247,7 +247,7 @@
 	mov	tmp2, #(1 << 2)
 	lsl	block_size, tmp2, block_size
 
-#if ASM_ASSERTION
+#if ENABLE_ASSERTIONS
 	/*
 	 * Assumes block size is at least 16 bytes to avoid manual realignment
 	 * of the cursor at the end of the DCZVA loop.
@@ -444,7 +444,7 @@
  * --------------------------------------------------------------------------
  */
 func memcpy16
-#if ASM_ASSERTION
+#if ENABLE_ASSERTIONS
 	orr	x3, x0, x1
 	tst	x3, #0xf
 	ASM_ASSERT(eq)
diff --git a/lib/cpus/aarch32/aem_generic.S b/lib/cpus/aarch32/aem_generic.S
index 3d6064c..7374e25 100644
--- a/lib/cpus/aarch32/aem_generic.S
+++ b/lib/cpus/aarch32/aem_generic.S
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2016, ARM Limited and Contributors. All rights reserved.
+ * Copyright (c) 2016-2017, ARM Limited and Contributors. All rights reserved.
  *
  * Redistribution and use in source and binary forms, with or without
  * modification, are permitted provided that the following conditions are met:
@@ -35,7 +35,7 @@
 
 func aem_generic_core_pwr_dwn
 	/* Assert if cache is enabled */
-#if ASM_ASSERTION
+#if ENABLE_ASSERTIONS
 	ldcopr	r0, SCTLR
 	tst	r0, #SCTLR_C_BIT
 	ASM_ASSERT(eq)
@@ -51,7 +51,7 @@
 
 func aem_generic_cluster_pwr_dwn
 	/* Assert if cache is enabled */
-#if ASM_ASSERTION
+#if ENABLE_ASSERTIONS
 	ldcopr	r0, SCTLR
 	tst	r0, #SCTLR_C_BIT
 	ASM_ASSERT(eq)
diff --git a/lib/cpus/aarch32/cortex_a32.S b/lib/cpus/aarch32/cortex_a32.S
index f631c4c..8cd7933 100644
--- a/lib/cpus/aarch32/cortex_a32.S
+++ b/lib/cpus/aarch32/cortex_a32.S
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2016, ARM Limited and Contributors. All rights reserved.
+ * Copyright (c) 2016-2017, ARM Limited and Contributors. All rights reserved.
  *
  * Redistribution and use in source and binary forms, with or without
  * modification, are permitted provided that the following conditions are met:
@@ -76,7 +76,7 @@
 	push	{r12, lr}
 
 	/* Assert if cache is enabled */
-#if ASM_ASSERTION
+#if ENABLE_ASSERTIONS
 	ldcopr	r0, SCTLR
 	tst	r0, #SCTLR_C_BIT
 	ASM_ASSERT(eq)
@@ -107,7 +107,7 @@
 	push	{r12, lr}
 
 	/* Assert if cache is enabled */
-#if ASM_ASSERTION
+#if ENABLE_ASSERTIONS
 	ldcopr	r0, SCTLR
 	tst	r0, #SCTLR_C_BIT
 	ASM_ASSERT(eq)
diff --git a/lib/cpus/aarch32/cpu_helpers.S b/lib/cpus/aarch32/cpu_helpers.S
index dc1b6e6..7606b8e 100644
--- a/lib/cpus/aarch32/cpu_helpers.S
+++ b/lib/cpus/aarch32/cpu_helpers.S
@@ -53,7 +53,7 @@
 	/* Get the matching cpu_ops pointer (clobbers: r0 - r5) */
 	bl	get_cpu_ops_ptr
 
-#if ASM_ASSERTION
+#if ENABLE_ASSERTIONS
 	cmp	r0, #0
 	ASM_ASSERT(ne)
 #endif
@@ -92,7 +92,7 @@
 	pop	{r2, lr}
 
 	ldr	r0, [r0, #CPU_DATA_CPU_OPS_PTR]
-#if ASM_ASSERTION
+#if ENABLE_ASSERTIONS
 	cmp	r0, #0
 	ASM_ASSERT(ne)
 #endif
@@ -118,7 +118,7 @@
 	cmp	r1, #0
 	bne	1f
 	bl	get_cpu_ops_ptr
-#if ASM_ASSERTION
+#if ENABLE_ASSERTIONS
 	cmp	r0, #0
 	ASM_ASSERT(ne)
 #endif
diff --git a/lib/cpus/aarch64/cpu_helpers.S b/lib/cpus/aarch64/cpu_helpers.S
index 47cb6a2..6a39916 100644
--- a/lib/cpus/aarch64/cpu_helpers.S
+++ b/lib/cpus/aarch64/cpu_helpers.S
@@ -55,7 +55,7 @@
 
 	/* Get the matching cpu_ops pointer */
 	bl	get_cpu_ops_ptr
-#if ASM_ASSERTION
+#if ENABLE_ASSERTIONS
 	cmp	x0, #0
 	ASM_ASSERT(ne)
 #endif
@@ -94,7 +94,7 @@
 
 	mrs	x1, tpidr_el3
 	ldr	x0, [x1, #CPU_DATA_CPU_OPS_PTR]
-#if ASM_ASSERTION
+#if ENABLE_ASSERTIONS
 	cmp	x0, #0
 	ASM_ASSERT(ne)
 #endif
@@ -120,7 +120,7 @@
 	cbnz	x0, 1f
 	mov	x10, x30
 	bl	get_cpu_ops_ptr
-#if ASM_ASSERTION
+#if ENABLE_ASSERTIONS
 	cmp	x0, #0
 	ASM_ASSERT(ne)
 #endif