arm: arm926ej-s: Add sunxi code

Some Allwinner SoCs use an ARM926EJ-S core.

Add the Allwinner/sunXi-specific code to the ARM926EJ-S CPU directory:
the sdelay() helper, the FEL mode utility routines, an SPL linker
script and the Makefile/config.mk glue needed to build them.

Signed-off-by: Icenowy Zheng <icenowy@aosc.io>
Reviewed-by: Andre Przywara <andre.przywara@arm.com>
Signed-off-by: Jesse Taube <Mr.Bossman075@gmail.com>
Signed-off-by: Andre Przywara <andre.przywara@arm.com>
diff --git a/arch/arm/cpu/arm926ejs/Makefile b/arch/arm/cpu/arm926ejs/Makefile
index b901b7c..7f1436d 100644
--- a/arch/arm/cpu/arm926ejs/Makefile
+++ b/arch/arm/cpu/arm926ejs/Makefile
@@ -15,6 +15,7 @@
 obj-$(CONFIG_MX27) += mx27/
 obj-$(if $(filter mxs,$(SOC)),y) += mxs/
 obj-$(if $(filter spear,$(SOC)),y) += spear/
+obj-$(CONFIG_ARCH_SUNXI) += sunxi/
 
 # some files can only build in ARM or THUMB2, not THUMB1
 
diff --git a/arch/arm/cpu/arm926ejs/cpu.c b/arch/arm/cpu/arm926ejs/cpu.c
index 93d7a02..2ce413a 100644
--- a/arch/arm/cpu/arm926ejs/cpu.c
+++ b/arch/arm/cpu/arm926ejs/cpu.c
@@ -21,6 +21,19 @@
 
 static void cache_flush(void);
 
+/************************************************************
+ * sdelay() - simple spin loop.  Will be constant time, as
+ *  it is generally used in bypass conditions only.  This
+ *  is necessary until timers are accessible.
+ *
+ *  Not inlined, to increase the chance it is in the cache when called.
+ *************************************************************/
+void sdelay(unsigned long loops)
+{
+	__asm__ volatile ("1:\n" "subs %0, %1, #1\n"
+			  "bne 1b":"=r" (loops):"0"(loops));
+}
+
 int cleanup_before_linux (void)
 {
 	/*
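Note on usage: sdelay() is the only delay primitive available this early,
before any sunxi timer has been programmed.  A minimal, illustrative call
site is sketched below; the register address, value and loop count are
placeholders and are not part of this patch.

  #include <asm/io.h>

  void sdelay(unsigned long loops);	/* added to arm926ejs/cpu.c above */

  /*
   * Hypothetical early-init step: poke a clock register, then spin
   * briefly because no timer is usable yet.  Address and value are
   * placeholders, not real sunxi registers.
   */
  static void example_early_clock_poke(void)
  {
  	writel(0x80000000, (void *)0x01c20000);	/* placeholder CCU write */
  	sdelay(200);				/* ~200 spin-loop iterations */
  }
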
diff --git a/arch/arm/cpu/arm926ejs/sunxi/Makefile b/arch/arm/cpu/arm926ejs/sunxi/Makefile
new file mode 100644
index 0000000..7d8b959
--- /dev/null
+++ b/arch/arm/cpu/arm926ejs/sunxi/Makefile
@@ -0,0 +1,5 @@
+# SPDX-License-Identifier: GPL-2.0+
+# (C) Copyright 2012 Henrik Nordstrom <henrik@henriknordstrom.net>
+
+obj-y	+= fel_utils.o
+CFLAGS_fel_utils.o := -marm
diff --git a/arch/arm/cpu/arm926ejs/sunxi/config.mk b/arch/arm/cpu/arm926ejs/sunxi/config.mk
new file mode 100644
index 0000000..76ffec9
--- /dev/null
+++ b/arch/arm/cpu/arm926ejs/sunxi/config.mk
@@ -0,0 +1,6 @@
+# Build a combined spl + u-boot image
+ifdef CONFIG_SPL
+ifndef CONFIG_SPL_BUILD
+ALL-y += u-boot-sunxi-with-spl.bin
+endif
+endif
diff --git a/arch/arm/cpu/arm926ejs/sunxi/fel_utils.S b/arch/arm/cpu/arm926ejs/sunxi/fel_utils.S
new file mode 100644
index 0000000..08be7ed
--- /dev/null
+++ b/arch/arm/cpu/arm926ejs/sunxi/fel_utils.S
@@ -0,0 +1,33 @@
+/* SPDX-License-Identifier: GPL-2.0+ */
+/*
+ * Utility functions for FEL mode.
+ *
+ * Copyright (c) 2015 Google, Inc
+ */
+
+#include <asm-offsets.h>
+#include <config.h>
+#include <asm/system.h>
+#include <linux/linkage.h>
+
+ENTRY(save_boot_params)
+	ldr	r0, =fel_stash
+	str	sp, [r0, #0]
+	str	lr, [r0, #4]
+	mrs	lr, cpsr		@ Read CPSR
+	str	lr, [r0, #8]
+	mrc	p15, 0, lr, c1, c0, 0	@ Read CP15 SCTLR Register
+	str	lr, [r0, #12]
+	b	save_boot_params_ret
+ENDPROC(save_boot_params)
+
+ENTRY(return_to_fel)
+	mov	sp, r0
+	mov	lr, r1
+	ldr	r0, =fel_stash
+	ldr	r1, [r0, #12]
+	mcr	p15, 0, r1, c1, c0, 0	@ Write CP15 Control Register
+	ldr	r1, [r0, #8]
+	msr	cpsr, r1		@ Write CPSR
+	bx	lr
+ENDPROC(return_to_fel)
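For readers new to the FEL path: save_boot_params() spills the state needed
to re-enter the BROM's FEL (USB boot) mode into fel_stash, and
return_to_fel() restores it.  fel_stash itself is defined elsewhere in the
sunxi board code; the sketch below only illustrates the word layout the
assembly above relies on, with field names chosen here for readability.

  #include <linux/types.h>

  /* Layout assumed by save_boot_params()/return_to_fel(); the offsets in
   * the comments match the str/ldr offsets used above.
   */
  struct fel_stash_sketch {
  	u32 sp;		/* [r0, #0]  stack pointer of the FEL caller */
  	u32 lr;		/* [r0, #4]  return address back into the BROM */
  	u32 cpsr;	/* [r0, #8]  saved CPSR */
  	u32 sctlr;	/* [r0, #12] saved CP15 SCTLR */
  };
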
diff --git a/arch/arm/cpu/arm926ejs/sunxi/u-boot-spl.lds b/arch/arm/cpu/arm926ejs/sunxi/u-boot-spl.lds
new file mode 100644
index 0000000..9a000ac
--- /dev/null
+++ b/arch/arm/cpu/arm926ejs/sunxi/u-boot-spl.lds
@@ -0,0 +1,48 @@
+/* SPDX-License-Identifier: GPL-2.0+ */
+/*
+ * (C) Copyright 2018
+ * Icenowy Zheng <icenowy@aosc.io>
+ *
+ * Based on arch/arm/cpu/armv7/sunxi/u-boot-spl.lds.
+ */
+MEMORY { .sram : ORIGIN = CONFIG_SPL_TEXT_BASE,\
+		LENGTH = CONFIG_SPL_MAX_SIZE }
+MEMORY { .sdram : ORIGIN = CONFIG_SPL_BSS_START_ADDR, \
+		LENGTH = CONFIG_SPL_BSS_MAX_SIZE }
+
+OUTPUT_FORMAT("elf32-littlearm", "elf32-littlearm", "elf32-littlearm")
+OUTPUT_ARCH(arm)
+ENTRY(_start)
+SECTIONS
+{
+	.text      :
+	{
+		__start = .;
+		*(.vectors)
+		*(.text*)
+	} > .sram
+
+	. = ALIGN(4);
+	.rodata : { *(SORT_BY_ALIGNMENT(.rodata*)) } >.sram
+
+	. = ALIGN(4);
+	.data : { *(SORT_BY_ALIGNMENT(.data*)) } >.sram
+
+	. = ALIGN(4);
+	.u_boot_list : {
+		KEEP(*(SORT(.u_boot_list*)));
+	} > .sram
+
+	. = ALIGN(4);
+	__image_copy_end = .;
+	_end = .;
+
+	.bss :
+	{
+		. = ALIGN(4);
+		__bss_start = .;
+		*(.bss*)
+		. = ALIGN(4);
+		__bss_end = .;
+	} > .sdram
+}