patch-2.4.5 linux/include/asm-ppc/bitops.h
- Lines: 99
- Date: Mon May 21 15:02:06 2001
- Orig file: v2.4.4/linux/include/asm-ppc/bitops.h
- Orig date: Sat Nov 11 18:23:10 2000
diff -u --recursive --new-file v2.4.4/linux/include/asm-ppc/bitops.h linux/include/asm-ppc/bitops.h
@@ -1,5 +1,7 @@
/*
- * $Id: bitops.h,v 1.12 2000/02/09 03:28:31 davem Exp $
+ * BK Id: SCCS/s.bitops.h 1.7 05/17/01 18:14:24 cort
+ */
+/*
* bitops.h: Bit string operations on the ppc
*/
@@ -35,10 +37,10 @@
unsigned long mask = 1 << (nr & 0x1f);
unsigned long *p = ((unsigned long *)addr) + (nr >> 5);
- __asm__ __volatile__("\
-1: lwarx %0,0,%3
- or %0,%0,%2
- stwcx. %0,0,%3
+ __asm__ __volatile__("\n\
+1: lwarx %0,0,%3 \n\
+ or %0,%0,%2 \n\
+ stwcx. %0,0,%3 \n\
bne- 1b"
: "=&r" (old), "=m" (*p)
: "r" (mask), "r" (p), "m" (*p)
@@ -68,10 +70,10 @@
unsigned long mask = 1 << (nr & 0x1f);
unsigned long *p = ((unsigned long *)addr) + (nr >> 5);
- __asm__ __volatile__("\
-1: lwarx %0,0,%3
- andc %0,%0,%2
- stwcx. %0,0,%3
+ __asm__ __volatile__("\n\
+1: lwarx %0,0,%3 \n\
+ andc %0,%0,%2 \n\
+ stwcx. %0,0,%3 \n\
bne- 1b"
: "=&r" (old), "=m" (*p)
: "r" (mask), "r" (p), "m" (*p)
@@ -84,10 +86,10 @@
unsigned long mask = 1 << (nr & 0x1f);
unsigned long *p = ((unsigned long *)addr) + (nr >> 5);
- __asm__ __volatile__("\
-1: lwarx %0,0,%3
- xor %0,%0,%2
- stwcx. %0,0,%3
+ __asm__ __volatile__("\n\
+1: lwarx %0,0,%3 \n\
+ xor %0,%0,%2 \n\
+ stwcx. %0,0,%3 \n\
bne- 1b"
: "=&r" (old), "=m" (*p)
: "r" (mask), "r" (p), "m" (*p)
@@ -103,10 +105,10 @@
unsigned int mask = 1 << (nr & 0x1f);
volatile unsigned int *p = ((volatile unsigned int *)addr) + (nr >> 5);
- __asm__ __volatile__(SMP_WMB "\
-1: lwarx %0,0,%4
- or %1,%0,%3
- stwcx. %1,0,%4
+ __asm__ __volatile__(SMP_WMB "\n\
+1: lwarx %0,0,%4 \n\
+ or %1,%0,%3 \n\
+ stwcx. %1,0,%4 \n\
bne 1b"
SMP_MB
: "=&r" (old), "=&r" (t), "=m" (*p)
@@ -135,10 +137,10 @@
unsigned int mask = 1 << (nr & 0x1f);
volatile unsigned int *p = ((volatile unsigned int *)addr) + (nr >> 5);
- __asm__ __volatile__(SMP_WMB "\
-1: lwarx %0,0,%4
- andc %1,%0,%3
- stwcx. %1,0,%4
+ __asm__ __volatile__(SMP_WMB "\n\
+1: lwarx %0,0,%4 \n\
+ andc %1,%0,%3 \n\
+ stwcx. %1,0,%4 \n\
bne 1b"
SMP_MB
: "=&r" (old), "=&r" (t), "=m" (*p)
@@ -167,10 +169,10 @@
unsigned int mask = 1 << (nr & 0x1f);
volatile unsigned int *p = ((volatile unsigned int *)addr) + (nr >> 5);
- __asm__ __volatile__(SMP_WMB "\
-1: lwarx %0,0,%4
- xor %1,%0,%3
- stwcx. %1,0,%4
+ __asm__ __volatile__(SMP_WMB "\n\
+1: lwarx %0,0,%4 \n\
+ xor %1,%0,%3 \n\
+ stwcx. %1,0,%4 \n\
bne 1b"
SMP_MB
: "=&r" (old), "=&r" (t), "=m" (*p)