feat(drtm): flush dcache before DLME launch

Flush the data cache for the launch arguments and for the DLME region
before DLME launch, to ensure that the data passed by the DCE preamble
is committed to memory and cannot race with cache evictions.
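
For reference, both call sites follow the same map, flush, read, unmap
sequence. A minimal sketch of that pattern is below, assuming TF-A's
mmap_add_dynamic_region_alloc_va(), flush_dcache_range() and
mmap_remove_dynamic_region() helpers plus the ALIGNED_UP() and
DRTM_PAGE_SIZE definitions from the DRTM service; the read_ns_range()
helper is illustrative only and not part of this patch.

#include <stdint.h>
#include <string.h>

#include <arch_helpers.h>
#include <lib/xlat_tables/xlat_tables_v2.h>

#include "drtm_main.h"

/*
 * Illustrative only: map a non-secure physical range, flush it from the
 * data cache so the read observes what the DCE preamble wrote to memory,
 * copy it out, then drop the temporary mapping again.
 */
static int read_ns_range(uint64_t pa, void *dst, size_t len)
{
	uintptr_t va;
	size_t map_size = ALIGNED_UP(len, DRTM_PAGE_SIZE);
	int rc;

	rc = mmap_add_dynamic_region_alloc_va(pa, &va, map_size,
					      MT_MEMORY | MT_NS | MT_RO |
					      MT_SHAREABILITY_ISH);
	if (rc != 0) {
		return rc;
	}

	/* Clean+invalidate so stale cache lines cannot race the read. */
	flush_dcache_range(va, map_size);
	memcpy(dst, (const void *)va, len);

	return mmap_remove_dynamic_region(va, map_size);
}

In the patch itself the copy is a structure assignment for the launch
arguments and a flush-only pass (no copy) for the DLME region.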

Signed-off-by: Manish Pandey <manish.pandey2@arm.com>
Change-Id: I9946fd3420a17b86d9f1483e8b2cd5880033454e
diff --git a/services/std_svc/drtm/drtm_main.c b/services/std_svc/drtm/drtm_main.c
index c98d829..e0f5c17 100644
--- a/services/std_svc/drtm/drtm_main.c
+++ b/services/std_svc/drtm/drtm_main.c
@@ -322,8 +322,8 @@
 	uint64_t dlme_start, dlme_end;
 	uint64_t dlme_img_start, dlme_img_ep, dlme_img_end;
 	uint64_t dlme_data_start, dlme_data_end;
-	uintptr_t args_mapping;
-	size_t args_mapping_size;
+	uintptr_t va_mapping;
+	size_t va_mapping_size;
 	struct_drtm_dl_args *a;
 	struct_drtm_dl_args args_buf;
 	int rc;
@@ -334,16 +334,16 @@
 		return INVALID_PARAMETERS;
 	}
 
-	args_mapping_size = ALIGNED_UP(sizeof(struct_drtm_dl_args), DRTM_PAGE_SIZE);
+	va_mapping_size = ALIGNED_UP(sizeof(struct_drtm_dl_args), DRTM_PAGE_SIZE);
 
 	/* check DRTM parameters are within NS address region */
-	rc = plat_drtm_validate_ns_region(x1, args_mapping_size);
+	rc = plat_drtm_validate_ns_region(x1, va_mapping_size);
 	if (rc != 0) {
 		ERROR("DRTM: parameters lies within secure memory\n");
 		return INVALID_PARAMETERS;
 	}
 
-	rc = mmap_add_dynamic_region_alloc_va(x1, &args_mapping, args_mapping_size,
+	rc = mmap_add_dynamic_region_alloc_va(x1, &va_mapping, va_mapping_size,
 					      MT_MEMORY | MT_NS | MT_RO |
 					      MT_SHAREABILITY_ISH);
 	if (rc != 0) {
@@ -351,15 +351,14 @@
 		      __func__, rc);
 		return INTERNAL_ERROR;
 	}
-	a = (struct_drtm_dl_args *)args_mapping;
-	/*
-	 * TODO: invalidate all data cache before reading the data passed by the
-	 * DCE Preamble.  This is required to avoid / defend against racing with
-	 * cache evictions.
-	 */
+	a = (struct_drtm_dl_args *)va_mapping;
+
+	/* Flush the args passed by the DCE Preamble from the data cache. */
+	flush_dcache_range(va_mapping, va_mapping_size);
+
 	args_buf = *a;
 
-	rc = mmap_remove_dynamic_region(args_mapping, args_mapping_size);
+	rc = mmap_remove_dynamic_region(va_mapping, va_mapping_size);
 	if (rc) {
 		ERROR("%s(): mmap_remove_dynamic_region() failed unexpectedly"
 		      " rc=%d\n", __func__, rc);
@@ -458,6 +457,28 @@
 		}
 	}
 
+	/*
+	 * Map the data range passed by the DCE Preamble and flush it from the
+	 * data cache, to avoid / defend against racing with cache evictions.
+	 */
+	va_mapping_size = ALIGNED_UP((dlme_end - dlme_start), DRTM_PAGE_SIZE);
+	rc = mmap_add_dynamic_region_alloc_va(dlme_start, &va_mapping, va_mapping_size,
+					      MT_MEMORY | MT_NS | MT_RO |
+					      MT_SHAREABILITY_ISH);
+	if (rc != 0) {
+		ERROR("DRTM: %s: mmap_add_dynamic_region_alloc_va() failed rc=%d\n",
+		      __func__, rc);
+		return INTERNAL_ERROR;
+	}
+	flush_dcache_range(va_mapping, va_mapping_size);
+
+	rc = mmap_remove_dynamic_region(va_mapping, va_mapping_size);
+	if (rc) {
+		ERROR("%s(): mmap_remove_dynamic_region() failed unexpectedly"
+		      " rc=%d\n", __func__, rc);
+		panic();
+	}
+
 	*a_out = *a;
 	return SUCCESS;
 }