2 * This file contains assembly-language implementations
3 * of IP-style 1's complement checksum routines.
5 * Copyright (C) 1995-1996 Gary Thomas (gdt@linuxppc.org)
7 * This program is free software; you can redistribute it and/or
8 * modify it under the terms of the GNU General Public License
9 * as published by the Free Software Foundation; either version
10 * 2 of the License, or (at your option) any later version.
12 * Severely hacked about by Paul Mackerras (paulus@cs.anu.edu.au).
15 #include <linux/sys.h>
16 #include <asm/processor.h>
17 #include <asm/cache.h>
18 #include <asm/errno.h>
19 #include <asm/ppc_asm.h>
20 #include <asm/export.h>
25 * computes the checksum of a memory block at buff, length len,
26 * and adds in "sum" (32-bit)
28 * __csum_partial(buff, len, sum)
30 _GLOBAL(__csum_partial)
/*
 * In (per the prototype comment above): r3 = buff, r4 = len, r5 = sum.
 * Out: r3 = updated 32-bit partial checksum (final carry folded in via
 * the addze at label 5 below).
 *
 * NOTE(review): the embedded original line numbers jump (30, 32..36,
 * 39, 42, 48, 77, 79, 81), so most of this routine's body is missing
 * from this excerpt. Do not treat the visible instruction sequence as
 * the complete algorithm; restore the full routine before editing.
 */
32 srawi. r6,r4,2 /* Divide len by 4 and also clear carry */
33 beq 3f /* if we're doing < 4 bytes */
34 andi. r0,r3,2 /* Align buffer to longword boundary */
/* 2-byte-aligned case: consume one halfword so the word loop below
   runs on a longword-aligned buffer. */
36 lhz r0,4(r3) /* do 2 bytes to get aligned */
39 srwi. r6,r4,2 /* # words to do */
42 1: andi. r6,r6,3 /* Prepare to handle words 4 by 4 */
48 21: srwi. r6,r4,4 /* # blocks of 4 words to do */
/* Tail: position a trailing odd byte in the upper byte of its
   halfword before summing (1's-complement byte order handling). */
77 slwi r0,r0,8 /* Upper byte of word */
79 5: addze r3,r5 /* add in final carry */
81 EXPORT_SYMBOL(__csum_partial)
84 * Computes the checksum of a memory block at src, length len,
85 * and adds in "sum" (32-bit), while copying the block to dst.
86 * If an access exception occurs on src or dst, it stores -EFAULT
87 * to *src_err or *dst_err respectively, and (for an error on
88 * src) zeroes the rest of dst.
90 * csum_partial_copy_generic(src, dst, len, sum, src_err, dst_err)
/*
 * NOTE(review): the body of CSUM_COPY_16_BYTES_WITHEX (original lines
 * 93-113) is absent from this excerpt — only the #define line with its
 * trailing continuation survives. As shown, the backslash makes the
 * next #define part of this macro; the missing body must be restored.
 */
92 #define CSUM_COPY_16_BYTES_WITHEX(n) \
/*
 * CSUM_COPY_16_BYTES_EXCODE(n): emits the exception-table entries for
 * the n-th 16-byte copy block. Labels 8n0..8n3 (presumably the four
 * source loads — TODO confirm against the WITHEX body) fault to
 * src_error; labels 8n4..8n7 (presumably the four destination stores)
 * fault to dst_error.
 */
114 #define CSUM_COPY_16_BYTES_EXCODE(n) \
115 EX_TABLE(8 ## n ## 0b, src_error); \
116 EX_TABLE(8 ## n ## 1b, src_error); \
117 EX_TABLE(8 ## n ## 2b, src_error); \
118 EX_TABLE(8 ## n ## 3b, src_error); \
119 EX_TABLE(8 ## n ## 4b, dst_error); \
120 EX_TABLE(8 ## n ## 5b, dst_error); \
121 EX_TABLE(8 ## n ## 6b, dst_error); \
122 EX_TABLE(8 ## n ## 7b, dst_error);
/* Stabs debug records naming the source directory and file. */
125 .stabs "arch/powerpc/lib/",N_SO,0,0,0f
126 .stabs "checksum_32.S",N_SO,0,0,0f
/* Cache-geometry shorthands derived from the kernel's L1 cache
   configuration; used by the cacheline copy loop below. */
129 CACHELINE_BYTES = L1_CACHE_BYTES
130 LG_CACHELINE_BYTES = L1_CACHE_SHIFT
131 CACHELINE_MASK = (L1_CACHE_BYTES-1)
133 _GLOBAL(csum_partial_copy_generic)
/*
 * Copy-and-checksum (see the prototype comment above this block):
 * csum_partial_copy_generic(src, dst, len, sum, src_err, dst_err).
 * Faults on src/dst are caught via the EX_TABLE entries below and
 * redirected to src_error / dst_error fixups.
 *
 * NOTE(review): the embedded original line numbers are sparse
 * (133, 142, 146, ... 304), so the bulk of this routine — including
 * the fixup labels and several #endif lines for the #if nesting
 * visible below — is missing from this excerpt. Restore the complete
 * routine before making any code change here.
 */
142 andi. r0,r0,CACHELINE_MASK /* # bytes to start of cache line */
146 cmplw 0,r5,r0 /* is this more than total to do? */
147 blt 63f /* if not much to do */
149 rlwnm r12,r12,r7,0,31 /* odd destination address: rotate one byte */
150 cmplwi cr7,r7,0 /* is destination address even ? */
151 andi. r8,r0,3 /* get it word-aligned first */
/* Byte/word warm-up copies until the destination reaches a cache
   line boundary; the numeric labels feed the EX_TABLE entries below. */
155 70: lbz r9,4(r4) /* do some bytes */
167 72: lwzu r9,4(r4) /* do some words */
172 58: srwi. r0,r5,LG_CACHELINE_BYTES /* # complete cachelines */
173 clrlwi r5,r5,32-LG_CACHELINE_BYTES
177 /* Here we decide how far ahead to prefetch the source */
183 #if MAX_COPY_PREFETCH > 1
184 /* Heuristically, for large transfers we prefetch
185 MAX_COPY_PREFETCH cachelines ahead. For small transfers
186 we prefetch 1 cacheline ahead. */
187 cmpwi r0,MAX_COPY_PREFETCH
189 li r7,MAX_COPY_PREFETCH
192 addi r3,r3,CACHELINE_BYTES
196 addi r3,r3,CACHELINE_BYTES
197 #endif /* MAX_COPY_PREFETCH > 1 */
205 /* the main body of the cacheline loop */
/* One CSUM_COPY_16_BYTES_WITHEX invocation per 16 bytes of cache
   line; the count scales with L1_CACHE_BYTES. The matching #endif
   lines are outside this excerpt. */
206 CSUM_COPY_16_BYTES_WITHEX(0)
207 #if L1_CACHE_BYTES >= 32
208 CSUM_COPY_16_BYTES_WITHEX(1)
209 #if L1_CACHE_BYTES >= 64
210 CSUM_COPY_16_BYTES_WITHEX(2)
211 CSUM_COPY_16_BYTES_WITHEX(3)
212 #if L1_CACHE_BYTES >= 128
213 CSUM_COPY_16_BYTES_WITHEX(4)
214 CSUM_COPY_16_BYTES_WITHEX(5)
215 CSUM_COPY_16_BYTES_WITHEX(6)
216 CSUM_COPY_16_BYTES_WITHEX(7)
250 rlwinm r3,r3,8,0,31 /* odd destination address: rotate one byte */
/* Exception-table entries for the warm-up copies above: loads fault
   to src_error, stores to dst_error. */
272 EX_TABLE(70b, src_error);
273 EX_TABLE(71b, dst_error);
274 EX_TABLE(72b, src_error);
275 EX_TABLE(73b, dst_error);
276 EX_TABLE(54b, dst_error);
279 * this stuff handles faults in the cacheline loop and branches to either
280 * src_error (if in read part) or dst_error (if in write part)
/* Exception-table entries for the cacheline loop, mirroring the
   WITHEX invocations above (same #if structure, #endifs missing
   from this excerpt). */
282 CSUM_COPY_16_BYTES_EXCODE(0)
283 #if L1_CACHE_BYTES >= 32
284 CSUM_COPY_16_BYTES_EXCODE(1)
285 #if L1_CACHE_BYTES >= 64
286 CSUM_COPY_16_BYTES_EXCODE(2)
287 CSUM_COPY_16_BYTES_EXCODE(3)
288 #if L1_CACHE_BYTES >= 128
289 CSUM_COPY_16_BYTES_EXCODE(4)
290 CSUM_COPY_16_BYTES_EXCODE(5)
291 CSUM_COPY_16_BYTES_EXCODE(6)
292 CSUM_COPY_16_BYTES_EXCODE(7)
297 EX_TABLE(30b, src_error);
298 EX_TABLE(31b, dst_error);
299 EX_TABLE(40b, src_error);
300 EX_TABLE(41b, dst_error);
301 EX_TABLE(50b, src_error);
302 EX_TABLE(51b, dst_error);
304 EXPORT_SYMBOL(csum_partial_copy_generic)
307 * __sum16 csum_ipv6_magic(const struct in6_addr *saddr,
308 * const struct in6_addr *daddr,
309 * __u32 len, __u8 proto, __wsum sum)
312 _GLOBAL(csum_ipv6_magic)
/*
 * IPv6 pseudo-header checksum (prototype above).
 * NOTE(review): original lines 313-327 and 329-334 are missing from
 * this excerpt — only three instructions of the routine are visible.
 */
328 add r5, r5, r6 /* assumption: len + proto doesn't carry */
/* Rotate left 16 and keep the low halfword — presumably the final
   fold of the 32-bit sum into the __sum16 return value in r3;
   confirm against the full routine. */
335 rlwinm r3, r3, 16, 16, 31
337 EXPORT_SYMBOL(csum_ipv6_magic)