1 /* SPDX-License-Identifier: GPL-2.0 */
4 #include <asm/asmmacro.h>
5 #include <asm/cacheasm.h>
6 #include <asm/processor.h>
8 * RB-Data: RedBoot data/bss
12 * The Linux-Kernel image including the loader must be loaded
13 * to a position so that the kernel and the boot parameters
14 * can fit in the space before the load address.
15 * ______________________________________________________
16 * |_RB-Data_|_P_|__________|_L_|___Linux-Kernel___|______|
19 * ______________________________________________________
20 * |___Linux-Kernel___|_P_|_L_|___________________________|
22 /* The loader copies the parameter list to the position that will
23 * be the end of the kernel and itself to the end of the
27 /* Make sure we have enough space for the 'uncompressor' */
/* Decompressor stack and heap sizes (bytes).
 * HEAP_SIZE is 512 KiB; NOTE(review): presumably sized for the
 * gunzip work buffers -- confirm against the decompressor. */
29 #define STACK_SIZE 32768
30 #define HEAP_SIZE (131072*4)
# Loader entry. Only fragments of the routine are visible here; the
# original lines are kept byte-identical with review notes added.
33 # a3: Size of parameter list
38 /* this must be the first byte of the loader! */
40 abi_entry(32) # we do not intend to return
47 .begin literal_prefix .text
49 /* put literals in here! */
54 /* 'reset' window registers */
59 #if XCHAL_HAVE_WINDOWED
# KERNEL_PS_WOE_MASK: PS value with window-overflow exceptions
# enabled. NOTE(review): presumably written to PS right after this --
# the wsr instruction is not visible in this fragment.
66 movi a4, KERNEL_PS_WOE_MASK
# Preserve the boot-parameter list pointer (arrives in abi_arg0/a2)
# in a saved register; it is restored into abi_arg0 again just before
# control is handed to the kernel.
70 KABI_C0 mov abi_saved0, abi_arg0
72 /* copy the loader to its address
73 * Note: The loader itself is a very small piece, so we assume we
74 * don't partially overlap. We also assume (even more important)
75 * that the kernel image is out of the way. Usually, when the
76 * load address of this image is not at an arbitrary address,
77 * but aligned to some 10K's we shouldn't overlap.
80 /* Note: The assembler cannot relax "addi a0, a0, ..." to an
81 l32r, so we load to a4 first. */
83 # addi a4, a0, __start - __start_a0
94 # a0: address where this code has been loaded
95 # a4: compiled address of __start
96 # a5: compiled end address
115 /* We have to flush and invalidate the caches here before we jump. */
# Write back any dirty D-cache lines so the just-copied code reaches
# memory, then invalidate the I-cache so no stale instructions are
# fetched (macros from <asm/cacheasm.h>; a5/a6 are scratch registers).
117 #if XCHAL_DCACHE_IS_WRITEBACK
119 ___flush_dcache_all a5 a6
123 ___invalidate_icache_all a5 a6
132 /* RedBoot is now at the end of the memory, so we don't have
133 * to copy the parameter list. Keep the code around; in case
134 * we need it again. */
137 # a2: start address of parameter list
138 # a3: length of parameter list
141 /* copy the parameter list out of the way */
# a6 = _param_start: destination buffer for the saved parameter list
# (the copy loop itself is not visible in this fragment).
143 movi a6, _param_start
153 /* clear BSS section */
# a1 is the stack pointer in the Xtensa ABI: point it at the top of
# the reserved stack area (the stack grows downward).
163 movi a1, _stack + STACK_SIZE
166 /* Uncompress the kernel */
# a3 = address where the compressed image was loaded.
172 movi a3, __image_load
177 # a8(a4) Load address of the image
# Set up the gunzip() arguments:
#   abi_arg0 = _image_start  (decompression destination)
#   abi_arg4 = _image_end    (end of the destination region)
#   abi_arg1 = 0x1000000     (16 MiB cap on the destination size)
#   [complen] = _image_end - _image_start, stored through abi_arg3 so
#               the callee can read/update the length in memory
179 movi abi_arg0, _image_start
180 movi abi_arg4, _image_end
181 movi abi_arg1, 0x1000000
182 sub abi_tmp0, abi_arg4, abi_arg0
183 movi abi_arg3, complen
184 s32i abi_tmp0, abi_arg3, 0
188 # abi_arg0 destination
189 # abi_arg1 maximum size of destination
191 # abi_arg3 ptr to length
# Load the address of gunzip() for an indirect call; the call
# instruction itself is outside this fragment.
194 movi abi_tmp0, gunzip
202 # abi_arg0 destination start
203 # abi_arg1 maximum size of destination
204 # abi_arg2 source start
205 # abi_arg3 ptr to length
206 # abi_arg4 destination end
# Plain copy loop (presumably the already-uncompressed-image path --
# the branch selecting it is not visible): move 16 bytes per
# iteration from [abi_arg2] to [abi_arg0] until the destination end
# is reached. The "1:" loop label lies outside this fragment.
209 l32i abi_tmp0, abi_arg2, 0
210 l32i abi_tmp1, abi_arg2, 4
211 s32i abi_tmp0, abi_arg0, 0
212 s32i abi_tmp1, abi_arg0, 4
213 l32i abi_tmp0, abi_arg2, 8
214 l32i abi_tmp1, abi_arg2, 12
215 s32i abi_tmp0, abi_arg0, 8
216 s32i abi_tmp1, abi_arg0, 12
217 addi abi_arg0, abi_arg0, 16
218 addi abi_arg2, abi_arg2, 16
# NOTE(review): blt is a signed comparison; for addresses with bit 31
# set, bltu would be the safe form -- confirm the platform's address
# map before considering a change.
219 blt abi_arg0, abi_arg4, 1b
222 /* jump to the kernel */
# Same cache discipline as after the loader copy: write back the
# freshly written kernel image, then invalidate the I-cache before
# executing it (a5/a6 are scratch registers).
224 #if XCHAL_DCACHE_IS_WRITEBACK
226 ___flush_dcache_all a5 a6
230 ___invalidate_icache_all a5 a6
234 # a2 Boot parameter list
# Restore the boot-parameter list pointer that was stashed in
# abi_saved0 at entry so the kernel receives it in abi_arg0/a2.
236 KABI_C0 mov abi_arg0, abi_saved0
# a0 = kernel entry address; the jump instruction itself is not
# visible in this fragment.
237 movi a0, _image_start
# Literal: address of the end of the decompressor heap area.
247 .long _heap + HEAP_SIZE
# Reserve zero-initialized (common/BSS) storage for the loader's
# stack and heap.
249 .comm _stack, STACK_SIZE
250 .comm _heap, HEAP_SIZE