patch-2.4.5 linux/arch/ppc/kernel/bitops.c
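
The change itself is mechanical: each of the six lwarx/stwcx. loops in
bitops.c had its inline assembly written as a single string constant with
raw newlines embedded in it, a GNU extension that newer compilers reject.
The patched form spells each newline out as an explicit "\n" escape and
keeps the trailing backslash as an ordinary source-line continuation, so
the assembler still receives one instruction per line and the source still
reads that way. A BitKeeper Id comment is also added at the top of the
file.

As a standalone sketch of the patched string form (plain C, printed here
rather than fed to the assembler; the template text is copied from the
set_bit() hunk below):

	#include <stdio.h>

	/* "\n" embeds a real newline in the string; the bare "\" before
	 * each line break only continues the source line.  The leading
	 * tab on each continued line is part of the string, exactly as
	 * in the asm templates in the patch. */
	static const char tmpl[] = "1:	lwarx	%0,0,%3 \n\
	or	%0,%0,%2 \n\
	stwcx.	%0,0,%3 \n\
	bne	1b";

	int main(void)
	{
		puts(tmpl);	/* one instruction per line, as gas sees it */
		return 0;
	}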

diff -u --recursive --new-file v2.4.4/linux/arch/ppc/kernel/bitops.c linux/arch/ppc/kernel/bitops.c
@@ -1,4 +1,7 @@
 /*
+ * BK Id: SCCS/s.bitops.c 1.7 05/17/01 18:14:21 cort
+ */
+/*
  * Copyright (C) 1996 Paul Mackerras.
  */
 
@@ -16,10 +19,10 @@
 	unsigned long mask = 1 << (nr & 0x1f);
 	unsigned long *p = ((unsigned long *)addr) + (nr >> 5);
 	
-	__asm__ __volatile__(SMP_WMB "\
-1:	lwarx	%0,0,%3
-	or	%0,%0,%2
-	stwcx.	%0,0,%3
+	__asm__ __volatile__(SMP_WMB "\n\
+1:	lwarx	%0,0,%3 \n\
+	or	%0,%0,%2 \n\
+	stwcx.	%0,0,%3 \n\
 	bne	1b"
 	SMP_MB
 	: "=&r" (old), "=m" (*p)
@@ -33,10 +36,10 @@
 	unsigned long mask = 1 << (nr & 0x1f);
 	unsigned long *p = ((unsigned long *)addr) + (nr >> 5);
 
-	__asm__ __volatile__(SMP_WMB "\
-1:	lwarx	%0,0,%3
-	andc	%0,%0,%2
-	stwcx.	%0,0,%3
+	__asm__ __volatile__(SMP_WMB "\n\
+1:	lwarx	%0,0,%3 \n\
+	andc	%0,%0,%2 \n\
+	stwcx.	%0,0,%3 \n\
 	bne	1b"
 	SMP_MB
 	: "=&r" (old), "=m" (*p)
@@ -50,10 +53,10 @@
 	unsigned long mask = 1 << (nr & 0x1f);
 	unsigned long *p = ((unsigned long *)addr) + (nr >> 5);
 
-	__asm__ __volatile__(SMP_WMB "\
-1:	lwarx	%0,0,%3
-	xor	%0,%0,%2
-	stwcx.	%0,0,%3
+	__asm__ __volatile__(SMP_WMB "\n\
+1:	lwarx	%0,0,%3 \n\
+	xor	%0,%0,%2 \n\
+	stwcx.	%0,0,%3 \n\
 	bne	1b"
 	SMP_MB
 	: "=&r" (old), "=m" (*p)
@@ -67,10 +70,10 @@
 	unsigned int mask = 1 << (nr & 0x1f);
 	volatile unsigned int *p = ((volatile unsigned int *)addr) + (nr >> 5);
 
-	__asm__ __volatile__(SMP_WMB "\
-1:	lwarx	%0,0,%4
-	or	%1,%0,%3
-	stwcx.	%1,0,%4
+	__asm__ __volatile__(SMP_WMB "\n\
+1:	lwarx	%0,0,%4 \n\
+	or	%1,%0,%3 \n\
+	stwcx.	%1,0,%4 \n\
 	bne	1b"
 	SMP_MB
 	: "=&r" (old), "=&r" (t), "=m" (*p)
@@ -86,10 +89,10 @@
 	unsigned int mask = 1 << (nr & 0x1f);
 	volatile unsigned int *p = ((volatile unsigned int *)addr) + (nr >> 5);
 
-	__asm__ __volatile__(SMP_WMB "\
-1:	lwarx	%0,0,%4
-	andc	%1,%0,%3
-	stwcx.	%1,0,%4
+	__asm__ __volatile__(SMP_WMB "\n\
+1:	lwarx	%0,0,%4 \n\
+	andc	%1,%0,%3 \n\
+	stwcx.	%1,0,%4 \n\
 	bne	1b"
 	SMP_MB
 	: "=&r" (old), "=&r" (t), "=m" (*p)
@@ -105,10 +108,10 @@
 	unsigned int mask = 1 << (nr & 0x1f);
 	volatile unsigned int *p = ((volatile unsigned int *)addr) + (nr >> 5);
 
-	__asm__ __volatile__(SMP_WMB "\
-1:	lwarx	%0,0,%4
-	xor	%1,%0,%3
-	stwcx.	%1,0,%4
+	__asm__ __volatile__(SMP_WMB "\n\
+1:	lwarx	%0,0,%4 \n\
+	xor	%1,%0,%3 \n\
+	stwcx.	%1,0,%4 \n\
 	bne	1b"
 	SMP_MB
 	: "=&r" (old), "=&r" (t), "=m" (*p)
