1 #include <linux/linkage.h>
2 #include <asm-generic/export.h>
4 #include <asm/asm-extable.h>
/*
 * fixup op reg addr lbl
 *
 * Emit one user-memory access instruction (\op \reg, \addr) wrapped in an
 * exception-table entry: if the access faults at runtime, the exception
 * handler diverts control to \lbl instead of taking a kernel oops.
 * The access instruction itself carries local label "100" (that line is
 * elided from this excerpt -- TODO confirm against the full file), which
 * _asm_extable records as the potentially-faulting address.
 * NOTE(review): the closing .endm is also elided from this excerpt.
 */
7 .macro fixup op reg addr lbl
10 _asm_extable 100b, \lbl
/*
 * __asm_copy_to_user(void __user *to, const void *from, unsigned long n)
 * __asm_copy_from_user(void *to, const void __user *from, unsigned long n)
 *
 * Copy n bytes between kernel and user memory with page-fault fixup.
 * In:  a0 = dst, a1 = src, a2 = byte count.
 * Out: presumably a0 = number of bytes NOT copied (0 on success), per the
 *      usual uaccess contract -- the fixup path (label 10) that computes it
 *      is elided from this excerpt; TODO confirm against the full file.
 *
 * Both entry points share a single body: the copy loop is direction-
 * agnostic; only which side is user memory differs, and every user access
 * goes through the "fixup" macro so a fault lands on label 10.
 *
 * Strategy: byte-copy for tiny sizes; otherwise align dst to SZREG, then
 * either an 8-word unrolled copy (src also aligned) or a shifting word
 * copy (src misaligned), then byte-copy the tail.
 *
 * NOTE(review): this excerpt omits many interior lines (CSR user-access
 * enable/disable, labels 1:/2:/3:/10:, .Lskip_align_dst, .Lword_copy,
 * .Lshift_copy, .Lbyte_copy_tail, .Lout_copy_user).
 */
13 ENTRY(__asm_copy_to_user)
14 ENTRY(__asm_copy_from_user)
16 /* Enable access to user memory */
21 * Save the terminal address which will be used to compute the number
22 * of bytes copied in case of a fixup exception.
27 * Register allocation for code below:
28 * a0 - start of uncopied dst
29 * a1 - start of uncopied src
31 * t0 - end of uncopied dst
36 * Use byte copy only if too small.
37 * SZREG holds 4 for RV32 and 8 for RV64
39 li a3, 9*SZREG /* size must be larger than size in word_copy */
40 bltu a2, a3, .Lbyte_copy_tail
43 * Copy first bytes until dst is aligned to word boundary.
45 * t1 - start of aligned dst
/* Round dst up/down to a SZREG boundary; loop below closes the gap. */
48 andi t1, t1, ~(SZREG-1)
49 /* dst is already aligned, skip */
50 beq a0, t1, .Lskip_align_dst
52 /* a5 - one byte for copying data */
/* Head alignment loop: one byte per iteration until a0 reaches t1. */
53 fixup lb a5, 0(a1), 10f
54 addi a1, a1, 1 /* src */
55 fixup sb a5, 0(a0), 10f
56 addi a0, a0, 1 /* dst */
57 bltu a0, t1, 1b /* t1 - start of aligned dst */
62 * Use shift-copy if src is misaligned.
63 * Use word-copy if both src and dst are aligned because
64 * can not use shift-copy which do not require shifting
66 /* a1 - start of src */
72 * Both src and dst are aligned, unrolled word copy
74 * a0 - start of aligned dst
75 * a1 - start of aligned src
76 * t0 - end of aligned dst
/*
 * t0 is pulled back by one full unroll so the loop's bound check
 * guarantees all 8 loads/stores of an iteration stay in range.
 */
78 addi t0, t0, -(8*SZREG) /* not to over run */
/* 8-register load burst, then 8-register store burst (8*SZREG bytes). */
80 fixup REG_L a4, 0(a1), 10f
81 fixup REG_L a5, SZREG(a1), 10f
82 fixup REG_L a6, 2*SZREG(a1), 10f
83 fixup REG_L a7, 3*SZREG(a1), 10f
84 fixup REG_L t1, 4*SZREG(a1), 10f
85 fixup REG_L t2, 5*SZREG(a1), 10f
86 fixup REG_L t3, 6*SZREG(a1), 10f
87 fixup REG_L t4, 7*SZREG(a1), 10f
88 fixup REG_S a4, 0(a0), 10f
89 fixup REG_S a5, SZREG(a0), 10f
90 fixup REG_S a6, 2*SZREG(a0), 10f
91 fixup REG_S a7, 3*SZREG(a0), 10f
92 fixup REG_S t1, 4*SZREG(a0), 10f
93 fixup REG_S t2, 5*SZREG(a0), 10f
94 fixup REG_S t3, 6*SZREG(a0), 10f
95 fixup REG_S t4, 7*SZREG(a0), 10f
100 addi t0, t0, 8*SZREG /* revert to original value */
106 * Word copy with shifting.
107 * For misaligned copy we still perform aligned word copy, but
108 * we need to use the value fetched from the previous iteration and
110 * This is safe because reading is less than a word size.
112 * a0 - start of aligned dst
114 * a3 - a1 & mask:(SZREG-1)
115 * t0 - end of uncopied dst
116 * t1 - end of aligned dst
118 /* calculating aligned word boundary for dst */
119 andi t1, t0, ~(SZREG-1)
120 /* Converting unaligned src to aligned src */
121 andi a1, a1, ~(SZREG-1)
/* t3 = misalignment in bits; used to splice adjacent words together. */
128 slli t3, a3, 3 /* converting bytes in a3 to bits */
132 /* Load the first word to combine with second word */
133 fixup REG_L a5, 0(a1), 10f
136 /* Main shifting copy
138 * a0 - start of aligned dst
139 * a1 - start of aligned src
140 * t1 - end of aligned dst
143 /* At least one iteration will be executed */
/* Each iteration fetches the next word and stores the spliced result. */
145 fixup REG_L a5, SZREG(a1), 10f
149 fixup REG_S a2, 0(a0), 10f
153 /* Revert src to original unaligned value */
158 * Byte copy anything left.
160 * a0 - start of remaining dst
161 * a1 - start of remaining src
162 * t0 - end of remaining dst
164 bgeu a0, t0, .Lout_copy_user /* check if end of copy */
/* Tail loop: one byte per iteration until a0 reaches t0. */
166 fixup lb a5, 0(a1), 10f
167 addi a1, a1, 1 /* src */
168 fixup sb a5, 0(a0), 10f
169 addi a0, a0, 1 /* dst */
170 bltu a0, t0, 4b /* t0 - end of dst */
173 /* Disable access to user memory */
178 /* Exception fixup code */
180 /* Disable access to user memory */
184 ENDPROC(__asm_copy_to_user)
185 ENDPROC(__asm_copy_from_user)
186 EXPORT_SYMBOL(__asm_copy_to_user)
187 EXPORT_SYMBOL(__asm_copy_from_user)
/*
 * __clear_user(void __user *to, unsigned long n) -- tail of the routine;
 * its ENTRY label and argument setup fall in a gap of this excerpt, so
 * register roles below are partly inferred: a0 is the store cursor
 * (grounded by the stores to (a0)); a3 is the terminal address and t0/t1
 * the aligned bounds (grounded by the comments at original lines 201-203).
 * Presumably returns the number of bytes NOT cleared, matching the
 * copy_user contract above -- TODO confirm against the full file.
 *
 * Zeroes the region with word stores between the aligned bounds, and
 * byte stores for the unaligned head (label 4) and the remainder
 * (label 5).  Every user store goes through "fixup" so a fault lands
 * on fixup label 11.
 */
192 /* Enable access to user memory */
/* Compute the SZREG-aligned sub-range [t0, t1) eligible for word stores. */
198 andi t1, a3, ~(SZREG-1)
199 andi t0, t0, ~(SZREG-1)
201 * a3: terminal address of target region
202 * t0: lowest doubleword-aligned address in target region
203 * t1: highest doubleword-aligned address in target region
/* Word-clear loop body: zero one register-width chunk at a time. */
208 fixup REG_S, zero, (a0), 11f
215 /* Disable access to user memory */
219 4: /* Edge case: unalignment */
220 fixup sb, zero, (a0), 11f
224 5: /* Edge case: remainder */
225 fixup sb, zero, (a0), 11f
230 /* Exception fixup code */
232 /* Disable access to user memory */
236 ENDPROC(__clear_user)
237 EXPORT_SYMBOL(__clear_user)