@@ -29,3 +29,4 @@ obj-$(CONFIG_CPU_VR41XX) += dump_tlb.o
# libgcc-style stuff needed in the kernel
obj-y += ashldi3.o ashrdi3.o cmpdi2.o lshrdi3.o ucmpdi2.o
+obj-y += csum_partial_test.o
@@ -101,7 +101,7 @@
.text
.set noreorder
.align 5
-LEAF(csum_partial)
+LEAF(asm_csum_partial)
move sum, zero
move t7, zero
@@ -296,7 +296,7 @@ LEAF(csum_partial)
ADDC32(sum, a2)
jr ra
.set noreorder
- END(csum_partial)
+ END(asm_csum_partial)
/*
new file mode 100644
@@ -0,0 +1,109 @@
+#include <net/checksum.h>
+
+/*
+ * Reference fold: reduce a 32-bit partial checksum to 16 bits and
+ * complement it.  Two folding passes are needed because the first
+ * addition can itself carry into bit 16.
+ */
+static inline __sum16 ref_csum_fold(__wsum csum)
+{
+ u32 v = (__force u32)csum;
+
+ v = (v >> 16) + (v & 0xffff);
+ v = (v >> 16) + (v & 0xffff);
+ return (__force __sum16)~v;
+}
+
+/*
+ * Fold a 32-bit value into 16 bits, ones'-complement style.  The
+ * second pass absorbs the carry the first pass may produce.
+ */
+static inline unsigned short from32to16(unsigned int x)
+{
+ unsigned int folded = (x >> 16) + (x & 0xffff);
+
+ folded = (folded >> 16) + (folded & 0xffff);
+ return folded;
+}
+
+/*
+ * Reference ones'-complement sum of the buffer (generic C version
+ * modelled on lib/checksum.c).  Aligns up from a possible odd start
+ * address through 16-bit to 32-bit accesses, sums 32-bit words with
+ * explicit carry propagation, handles the trailing 2- and 1-byte
+ * remainders, and returns the sum folded to 16 bits.
+ */
+static unsigned int do_csum(const unsigned char * buff, int len)
+{
+ int odd, count;
+ unsigned int result = 0;
+
+ if (len <= 0)
+ goto out;
+ odd = 1 & (unsigned long) buff;
+ if (odd) {
+ /*
+ * Leading byte at an odd address: it lands in the "wrong"
+ * half of the 16-bit word; the final byte-swap below
+ * compensates for this.
+ */
+#ifdef __BIG_ENDIAN
+ result = *buff;
+#else
+ result = *buff << 8;
+#endif
+ len--;
+ buff++;
+ }
+ count = len >> 1; /* nr of 16-bit words.. */
+ if (count) {
+ /* Consume one halfword if needed to reach 32-bit alignment. */
+ if (2 & (unsigned long) buff) {
+ result += *(unsigned short *) buff;
+ count--;
+ len -= 2;
+ buff += 2;
+ }
+ count >>= 1; /* nr of 32-bit words.. */
+ if (count) {
+ /* Main loop: 32-bit adds with manual carry tracking. */
+ unsigned int carry = 0;
+ do {
+ unsigned int w = *(unsigned int *) buff;
+ count--;
+ buff += 4;
+ result += carry;
+ result += w;
+ carry = (w > result);
+ } while (count);
+ result += carry;
+ /* Partial fold keeps the running sum from overflowing. */
+ result = (result & 0xffff) + (result >> 16);
+ }
+ /* Trailing halfword, if any. */
+ if (len & 2) {
+ result += *(unsigned short *) buff;
+ buff += 2;
+ }
+ }
+ /* Trailing byte, if any (endian-dependent placement). */
+ if (len & 1) {
+#ifdef __BIG_ENDIAN
+ result += (*buff << 8);
+#else
+ result += *buff;
+#endif
+ }
+ result = from32to16(result);
+ /* Buffer started at an odd address: swap the bytes back. */
+ if (odd)
+ result = ((result >> 8) & 0xff) | ((result & 0xff) << 8);
+out:
+ return result;
+}
+
+/*
+ * Reference csum_partial(): sum of the buffer plus the incoming
+ * 32-bit partial sum, with end-around carry on overflow.
+ */
+static __wsum ref_csum_partial(const void *buff, int len, __wsum sum)
+{
+ u32 old = (__force u32)sum;
+ unsigned int total = do_csum(buff, len) + old;
+
+ /* Wrap the carry back in if the addition overflowed. */
+ if (total < old)
+ total++;
+ return (__force __wsum)total;
+}
+
+__wsum asm_csum_partial(const void *buff, int len, __wsum sum);
+
+/*
+ * Checking wrapper around the assembly csum_partial: compute the
+ * checksum with both the C reference and the assembly version, dump
+ * the inputs when the folded results disagree, and always return the
+ * (trusted) reference result.
+ */
+__wsum csum_partial(const void *buff, int len, __wsum sum)
+{
+ __wsum ref_wsum = ref_csum_partial(buff, len, sum);
+ __sum16 ref_sum = ref_csum_fold(ref_wsum);
+ __wsum cal_wsum = asm_csum_partial(buff, len, sum);
+ __sum16 cal_sum = csum_fold(cal_wsum);
+
+ if (ref_sum != cal_sum) {
+ int i;
+
+ /*
+ * KERN_ERR: a mismatch means the assembly routine is
+ * broken.  Note %# width includes the "0x" prefix, so
+ * 16-bit values need %#06x and 32-bit values %#010x.
+ */
+ printk(KERN_ERR "csum_partial error."
+        " %#06x(%#010x) != %#06x(%#010x)\n",
+        ref_sum, (__force u32)ref_wsum,
+        cal_sum, (__force u32)cal_wsum);
+ printk(KERN_ERR "len %#x, sum %#010x\n",
+        len, (__force u32)sum);
+ /* KERN_CONT keeps the byte dump on a single log line. */
+ printk(KERN_ERR "buf");
+ for (i = 0; i < len; i++)
+ printk(KERN_CONT " %02x", *((const u8 *)buff + i));
+ printk(KERN_CONT "\n");
+ }
+ return ref_wsum;
+}