1 #include <linux/linkage.h>
2 #include <asm-generic/export.h>
4 #include <asm/asm-extable.h>
/*
 * fixup op reg addr lbl
 *
 * Emit a user-memory access "\op \reg, \addr" together with an
 * exception-table entry so that a fault taken on that access resumes
 * at \lbl (the local fault-handler label, e.g. 10f/11f below) instead
 * of oopsing the kernel.
 *
 * NOTE(review): this chunk is missing lines — the "100:" local label
 * that _asm_extable references (presumably placed on the access
 * instruction itself) and the closing ".endm" are not visible here.
 * Confirm against the full file before editing.
 */
7 .macro fixup op reg addr lbl
10 _asm_extable 100b, \lbl
/*
 * __asm_copy_to_user / __asm_copy_from_user — shared user-space memcpy
 * body; every user access goes through the "fixup" macro so a fault
 * branches to the local handler (label 10, not visible in this chunk).
 *
 * Apparent contract, from the visible code and in-line comments:
 *   a0 = dst, a1 = src, a2 = len; t0 = end of uncopied dst.
 * Strategy:
 *   1. len < 9*SZREG           -> plain byte copy (.Lbyte_copy_tail).
 *   2. Byte-copy until dst is SZREG-aligned.
 *   3. src also aligned        -> 8-register unrolled word copy.
 *      src misaligned          -> shifting word copy: aligned loads from
 *      the rounded-down src, combining the previous iteration's word
 *      with the next via shifts (t3 = misalignment in bits).
 *   4. Byte-copy any remaining tail, then disable user access and return.
 *
 * NOTE(review): this chunk is an incomplete extraction — the SUM-enable
 * csrs, the local labels 1:/2:/3:/4:/5:/10:, .Lskip_align_dst,
 * .Lshift_copy/.Lword_copy entry points, the return-value computation,
 * and several comment-block delimiters are missing (stray leading
 * numbers on each line are extraction artifacts, not code). Do not
 * modify instruction lines without consulting the full file.
 */
13 ENTRY(__asm_copy_to_user)
14 ENTRY(__asm_copy_from_user)
16 /* Enable access to user memory */
20 /* Save for return value */
24 * Register allocation for code below:
25 * a0 - start of uncopied dst
26 * a1 - start of uncopied src
28 * t0 - end of uncopied dst
33 * Use byte copy only if too small.
34 * SZREG holds 4 for RV32 and 8 for RV64
36 li a3, 9*SZREG /* size must be larger than size in word_copy */
37 bltu a2, a3, .Lbyte_copy_tail
40 * Copy first bytes until dst is aligned to word boundary.
42 * t1 - start of aligned dst
45 andi t1, t1, ~(SZREG-1)
46 /* dst is already aligned, skip */
47 beq a0, t1, .Lskip_align_dst
49 /* a5 - one byte for copying data */
50 fixup lb a5, 0(a1), 10f
51 addi a1, a1, 1 /* src */
52 fixup sb a5, 0(a0), 10f
53 addi a0, a0, 1 /* dst */
54 bltu a0, t1, 1b /* t1 - start of aligned dst */
59 * Use shift-copy if src is misaligned.
60 * Use word-copy if both src and dst are aligned because
61 * can not use shift-copy which do not require shifting
63 /* a1 - start of src */
69 * Both src and dst are aligned, unrolled word copy
71 * a0 - start of aligned dst
72 * a1 - start of aligned src
73 * t0 - end of aligned dst
/* 8-way unrolled copy: load 8 words, then store 8 words, so that a
 * fault on any access unwinds through the same fixup target. */
75 addi t0, t0, -(8*SZREG) /* not to over run */
77 fixup REG_L a4, 0(a1), 10f
78 fixup REG_L a5, SZREG(a1), 10f
79 fixup REG_L a6, 2*SZREG(a1), 10f
80 fixup REG_L a7, 3*SZREG(a1), 10f
81 fixup REG_L t1, 4*SZREG(a1), 10f
82 fixup REG_L t2, 5*SZREG(a1), 10f
83 fixup REG_L t3, 6*SZREG(a1), 10f
84 fixup REG_L t4, 7*SZREG(a1), 10f
85 fixup REG_S a4, 0(a0), 10f
86 fixup REG_S a5, SZREG(a0), 10f
87 fixup REG_S a6, 2*SZREG(a0), 10f
88 fixup REG_S a7, 3*SZREG(a0), 10f
89 fixup REG_S t1, 4*SZREG(a0), 10f
90 fixup REG_S t2, 5*SZREG(a0), 10f
91 fixup REG_S t3, 6*SZREG(a0), 10f
92 fixup REG_S t4, 7*SZREG(a0), 10f
97 addi t0, t0, 8*SZREG /* revert to original value */
103 * Word copy with shifting.
104 * For misaligned copy we still perform aligned word copy, but
105 * we need to use the value fetched from the previous iteration and
107 * This is safe because reading is less than a word size.
109 * a0 - start of aligned dst
111 * a3 - a1 & mask:(SZREG-1)
112 * t0 - end of uncopied dst
113 * t1 - end of aligned dst
115 /* calculating aligned word boundary for dst */
116 andi t1, t0, ~(SZREG-1)
117 /* Converting unaligned src to aligned src */
118 andi a1, a1, ~(SZREG-1)
125 slli t3, a3, 3 /* converting bytes in a3 to bits */
129 /* Load the first word to combine with second word */
130 fixup REG_L a5, 0(a1), 10f
133 /* Main shifting copy
135 * a0 - start of aligned dst
136 * a1 - start of aligned src
137 * t1 - end of aligned dst
140 /* At least one iteration will be executed */
142 fixup REG_L a5, SZREG(a1), 10f
/* NOTE(review): the shift/or instructions that combine the previous
 * and current words into a2 are not visible in this chunk. */
146 fixup REG_S a2, 0(a0), 10f
150 /* Revert src to original unaligned value */
155 * Byte copy anything left.
157 * a0 - start of remaining dst
158 * a1 - start of remaining src
159 * t0 - end of remaining dst
161 bgeu a0, t0, .Lout_copy_user /* check if end of copy */
163 fixup lb a5, 0(a1), 10f
164 addi a1, a1, 1 /* src */
165 fixup sb a5, 0(a0), 10f
166 addi a0, a0, 1 /* dst */
167 bltu a0, t0, 4b /* t0 - end of dst */
170 /* Disable access to user memory */
175 /* Exception fixup code */
177 /* Disable access to user memory */
/* NOTE(review): the fault path presumably computes the number of
 * uncopied bytes into a0 before returning — the instructions are not
 * visible here; confirm against the full file. */
181 ENDPROC(__asm_copy_to_user)
182 ENDPROC(__asm_copy_from_user)
183 EXPORT_SYMBOL(__asm_copy_to_user)
184 EXPORT_SYMBOL(__asm_copy_from_user)
/*
 * __clear_user — zero a range of user memory, with exception-table
 * fixups on every user store (fault handler is local label 11, not
 * visible in this chunk).
 *
 * Visible register roles (per the in-code comments):
 *   a0 = current dst, a3 = terminal address of the target region,
 *   t0 = lowest SZREG-aligned address, t1 = highest SZREG-aligned
 *   address; word stores (REG_S zero) cover [t0, t1), byte stores
 *   handle the unaligned head (label 4) and tail remainder (label 5).
 *
 * NOTE(review): the ENTRY(__clear_user) line, the SUM-enable csrs, the
 * loop labels/branches, and the fault-path return-value computation are
 * all missing from this extraction (stray leading numbers on each line
 * are artifacts). Consult the full file before changing any instruction.
 */
189 /* Enable access to user memory */
195 andi t1, a3, ~(SZREG-1)
196 andi t0, t0, ~(SZREG-1)
198 * a3: terminal address of target region
199 * t0: lowest doubleword-aligned address in target region
200 * t1: highest doubleword-aligned address in target region
205 fixup REG_S, zero, (a0), 11f
212 /* Disable access to user memory */
216 4: /* Edge case: unalignment */
217 fixup sb, zero, (a0), 11f
221 5: /* Edge case: remainder */
222 fixup sb, zero, (a0), 11f
227 /* Exception fixup code */
229 /* Disable access to user memory */
233 ENDPROC(__clear_user)
234 EXPORT_SYMBOL(__clear_user)