patch-2.1.26 linux/arch/alpha/lib/csum_partial_copy.c
Next file: linux/arch/alpha/lib/strncat.S
Previous file: linux/arch/alpha/lib/csum_ipv6_magic.S
Back to the patch index
Back to the overall index
- Lines: 326
- Date:
Thu Feb 6 12:44:41 1997
- Orig file:
v2.1.25/linux/arch/alpha/lib/csum_partial_copy.c
- Orig date:
Wed Jul 17 14:39:21 1996
diff -u --recursive --new-file v2.1.25/linux/arch/alpha/lib/csum_partial_copy.c linux/arch/alpha/lib/csum_partial_copy.c
@@ -7,6 +7,11 @@
* we do for performance..
*/
+#include <linux/types.h>
+#include <asm/string.h>
+#include <asm/uaccess.h>
+
+
#define ldq_u(x,y) \
__asm__ __volatile__("ldq_u %0,%1":"=r" (x):"m" (*(unsigned long *)(y)))
@@ -31,17 +36,52 @@
#define insqh(x,y,z) \
__asm__ __volatile__("insqh %1,%2,%0":"=r" (z):"r" (x),"r" (y))
+
+#define __get_user_u(x,ptr) /* fault-checked unaligned (ldq_u) load from user space */ \
+({ \
+ long __guu_err; /* 0 on success; fault fixup rewrites it (see lda below) */ \
+ __asm__ __volatile__( \
+ "1: ldq_u %0,%2\n" \
+ "2:\n" \
+ ".section __ex_table,\"a\"\n" /* exception-table entry: faulting insn at 1b */ \
+ " .gprel32 1b\n" \
+ " lda %0,2b-1b(%1)\n" /* fixup: loads error into x's reg, resumes at 2b */ \
+ ".previous" \
+ : "=r"(x), "=r"(__guu_err) \
+ : "m"(__m(ptr)), "1"(0)); /* "1"(0) pre-initializes __guu_err to 0 */ \
+ __guu_err; \
+})
+
+#define __put_user_u(x,ptr) /* fault-checked unaligned (stq_u) store to user space */ \
+({ \
+ long __puu_err; /* 0 on success; fault fixup rewrites it (see lda below) */ \
+ __asm__ __volatile__( \
+ "1: stq_u %2,%1\n" \
+ "2:\n" \
+ ".section __ex_table,\"a\"\n" /* exception-table entry: faulting insn at 1b */ \
+ " .gprel32 1b\n" /* FIX: \n was missing, fusing this and the next directive */ \
+ " lda $31,2b-1b(%0)\n" /* fixup: error into %0, value discarded ($31), resume at 2b */ \
+ ".previous" \
+ : "=r"(__puu_err) \
+ : "m"(__m(ptr)), "rJ"(x), "0"(0)); /* FIX: was __m(addr) — 'addr' is not a macro parameter */ \
+ __puu_err; \
+})
+
+
/*
* Ok. This isn't fun, but this is the EASY case.
*/
-static inline unsigned long csum_partial_copy_aligned(
- unsigned long *src, unsigned long *dst,
- long len, unsigned long checksum)
+static inline unsigned long
+csum_partial_cfu_aligned(unsigned long *src, unsigned long *dst,
+ long len, unsigned long checksum,
+ int *errp)
{
unsigned long carry = 0;
+ int err = 0;
while (len >= 0) {
- unsigned long word = *src;
+ unsigned long word;
+ err |= __get_user(word, src);
checksum += carry;
src++;
checksum += word;
@@ -54,7 +94,7 @@
checksum += carry;
if (len) {
unsigned long word, tmp;
- word = *src;
+ err |= __get_user(word, src);
tmp = *dst;
mskql(word, len, word);
checksum += word;
@@ -63,28 +103,31 @@
*dst = word | tmp;
checksum += carry;
}
- return checksum;
+ if (err) *errp = err;
+ return checksum;
}
/*
* This is even less fun, but this is still reasonably
* easy.
*/
-static inline unsigned long csum_partial_copy_dest_aligned(
- unsigned long *src, unsigned long *dst,
- unsigned long soff,
- long len, unsigned long checksum)
+static inline unsigned long
+csum_partial_cfu_dest_aligned(unsigned long *src, unsigned long *dst,
+ unsigned long soff,
+ long len, unsigned long checksum,
+ int *errp)
{
unsigned long first;
unsigned long word, carry;
unsigned long lastsrc = 7+len+(unsigned long)src;
+ int err = 0;
- ldq_u(first,src);
+ err |= __get_user_u(first,src);
carry = 0;
while (len >= 0) {
unsigned long second;
- ldq_u(second, src+1);
+ err |= __get_user_u(second, src+1);
extql(first, soff, word);
len -= 8;
src++;
@@ -102,7 +145,7 @@
if (len) {
unsigned long tmp;
unsigned long second;
- ldq_u(second, lastsrc);
+ err |= __get_user_u(second, lastsrc);
tmp = *dst;
extql(first, soff, word);
extqh(second, soff, first);
@@ -114,25 +157,28 @@
*dst = word | tmp;
checksum += carry;
}
+ if (err) *errp = err;
return checksum;
}
/*
* This is slightly less fun than the above..
*/
-static inline unsigned long csum_partial_copy_src_aligned(
- unsigned long *src, unsigned long *dst,
- unsigned long doff,
- long len, unsigned long checksum,
- unsigned long partial_dest)
+static inline unsigned long
+csum_partial_cfu_src_aligned(unsigned long *src, unsigned long *dst,
+ unsigned long doff,
+ long len, unsigned long checksum,
+ unsigned long partial_dest,
+ int *errp)
{
unsigned long carry = 0;
unsigned long word;
+ int err = 0;
mskql(partial_dest, doff, partial_dest);
while (len >= 0) {
unsigned long second_dest;
- word = *src;
+ err |= __get_user(word, src);
len -= 8;
insql(word, doff, second_dest);
checksum += carry;
@@ -147,7 +193,7 @@
checksum += carry;
if (len >= 0) {
unsigned long second_dest;
- word = *src;
+ err |= __get_user(word, src);
mskql(word, len-doff, word);
checksum += word;
insql(word, doff, second_dest);
@@ -162,7 +208,7 @@
checksum += carry;
} else if (len & 7) {
unsigned long second_dest;
- word = *src;
+ err |= __get_user(word, src);
ldq_u(second_dest, dst);
mskql(word, len-doff, word);
checksum += word;
@@ -172,6 +218,7 @@
stq_u(partial_dest | word | second_dest, dst);
checksum += carry;
}
+ if (err) *errp = err;
return checksum;
}
@@ -179,24 +226,26 @@
* This is so totally un-fun that it's frightening. Don't
* look at this too closely, you'll go blind.
*/
-static inline unsigned long csum_partial_copy_unaligned(
- unsigned long * src, unsigned long * dst,
- unsigned long soff, unsigned long doff,
- long len, unsigned long checksum,
- unsigned long partial_dest)
+static inline unsigned long
+csum_partial_cfu_unaligned(unsigned long * src, unsigned long * dst,
+ unsigned long soff, unsigned long doff,
+ long len, unsigned long checksum,
+ unsigned long partial_dest,
+ int *errp)
{
unsigned long carry = 0;
unsigned long first;
unsigned long lastsrc;
+ int err = 0;
- ldq_u(first, src);
+ err |= __get_user_u(first, src);
lastsrc = 7+len+(unsigned long)src;
mskql(partial_dest, doff, partial_dest);
while (len >= 0) {
unsigned long second, word;
unsigned long second_dest;
- ldq_u(second, src+1);
+ err |= __get_user_u(second, src+1);
extql(first, soff, word);
checksum += carry;
len -= 8;
@@ -216,8 +265,8 @@
if (len >= 0) {
unsigned long second, word;
unsigned long second_dest;
-
- ldq_u(second, lastsrc);
+
+ err |= __get_user_u(second, lastsrc);
extql(first, soff, word);
extqh(second, soff, first);
word |= first;
@@ -233,12 +282,12 @@
mskqh(second_dest, len, second_dest);
stq_u(partial_dest | second_dest, dst+1);
}
- checksum += carry;
+ checksum += carry;
} else if (len & 7) {
unsigned long second, word;
unsigned long second_dest;
- ldq_u(second, lastsrc);
+ err |= __get_user_u(second, lastsrc);
extql(first, soff, word);
extqh(second, soff, first);
word |= first;
@@ -251,42 +300,51 @@
stq_u(partial_dest | word | second_dest, dst);
checksum += carry;
}
+ if (err) *errp = err;
return checksum;
}
-unsigned int csum_partial_copy(char *src, char *dst, int len, int sum)
+unsigned int
+csum_partial_copy_from_user(int *errp, char *src, char *dst,
+ int len, unsigned int sum)
{
unsigned long checksum = (unsigned) sum;
unsigned long soff = 7 & (unsigned long) src;
unsigned long doff = 7 & (unsigned long) dst;
+ if (!access_ok(src, len, VERIFY_READ)) {
+ *errp = -EFAULT;
+ memset(dst, 0, len);
+ return checksum;
+ }
+
if (len) {
if (!doff) {
if (!soff)
- checksum = csum_partial_copy_aligned(
+ checksum = csum_partial_cfu_aligned(
(unsigned long *) src,
(unsigned long *) dst,
- len-8, checksum);
+ len-8, checksum, errp);
else
- checksum = csum_partial_copy_dest_aligned(
+ checksum = csum_partial_cfu_dest_aligned(
(unsigned long *) src,
(unsigned long *) dst,
- soff, len-8, checksum);
+ soff, len-8, checksum, errp);
} else {
unsigned long partial_dest;
ldq_u(partial_dest, dst);
if (!soff)
- checksum = csum_partial_copy_src_aligned(
+ checksum = csum_partial_cfu_src_aligned(
(unsigned long *) src,
(unsigned long *) dst,
doff, len-8, checksum,
- partial_dest);
+ partial_dest, errp);
else
- checksum = csum_partial_copy_unaligned(
+ checksum = csum_partial_cfu_unaligned(
(unsigned long *) src,
(unsigned long *) dst,
soff, doff, len-8, checksum,
- partial_dest);
+ partial_dest, errp);
}
/* 64 -> 33 bits */
checksum = (checksum & 0xffffffff) + (checksum >> 32);
@@ -297,4 +355,17 @@
checksum = (checksum & 0xffff) + (checksum >> 16);
}
return checksum;
+}
+
+unsigned int
+csum_partial_copy (const char *src, char *dst, int len, unsigned int sum)
+{
+ unsigned int ret;
+ int error = 0;
+
+ ret = csum_partial_copy_from_user(&error, src, dst, len, sum);
+ if (error)
+ printk("csum_partial_copy_old(): tell mingo to convert me!\n");
+
+ return ret;
}
FUNET's LINUX-ADM group, linux-adm@nic.funet.fi
TCL-scripts by Sam Shen, slshen@lbl.gov