patches/glibc/ports-2.10.1/560-ppc-atomic.patch
author "Yann E. MORIN" <yann.morin.1998@anciens.enib.fr>
Fri Apr 15 00:22:42 2011 +0200 (2011-04-15)
changeset 2544 751c3f735ada
permissions -rw-r--r--
scripts/internals: do not remove lib{32,64}/ symlinks after build

During the build, we create lib{32,64}/ symlinks out of the sysroot.
In some cases (eg. mingw32 target), these symlinks are still required
when running the toolchain. For other combinations, the symlinks are
without incidence, so they can be safely kept after the build.

Signed-off-by: "Yann E. MORIN" <yann.morin.1998@anciens.enib.fr>
snipped from suse
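
What the patch does: every PowerPC lwarx/stwcx. (and ldarx/stdcx.) loop in
glibc's atomic macros is converted from passing the pointer through a "b"
(base register) input plus separate "m"/"=m" memory operands to a single
"+Z" read-write memory operand printed with the "%y" operand modifier.
"%yN" emits the indexed "rA,rB" (or "0,rB") form that the
load-reserve/store-conditional instructions require, so GCC can choose the
addressing mode itself and the redundant operands disappear.  Essentially
the same change was later merged into upstream glibc.

For illustration only, a minimal sketch of an atomic-increment loop in both
styles (hypothetical helper names; assumes a PowerPC target and GCC-style
inline asm; not part of the patch itself):

    /* Old style: the pointer is forced into a base register (%2) and the
       memory access is described by extra "m"/"=m" operands.  */
    static inline int
    old_style_increment (int *mem)
    {
      int __val;
      __asm __volatile ("1:	lwarx	%0,0,%2\n"
			"	addi	%0,%0,1\n"
			"	stwcx.	%0,0,%2\n"
			"	bne-	1b"
			: "=&b" (__val), "=m" (*mem)
			: "b" (mem), "m" (*mem)
			: "cr0", "memory");
      return __val;
    }

    /* New style: one "+Z" read-write memory operand; "%y1" prints it in
       the indexed form lwarx/stwcx. expect.  */
    static inline int
    new_style_increment (int *mem)
    {
      int __val;
      __asm __volatile ("1:	lwarx	%0,%y1\n"
			"	addi	%0,%0,1\n"
			"	stwcx.	%0,%y1\n"
			"	bne-	1b"
			: "=&b" (__val), "+Z" (*mem)
			:
			: "cr0", "memory");
      return __val;
    }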

Index: sysdeps/powerpc/bits/atomic.h
===================================================================
RCS file: /cvs/glibc/libc/sysdeps/powerpc/bits/atomic.h,v
retrieving revision 1.17
diff -u -a -p -r1.17 atomic.h

diff -durN glibc-2.10.1.orig/sysdeps/powerpc/bits/atomic.h glibc-2.10.1/sysdeps/powerpc/bits/atomic.h
--- glibc-2.10.1.orig/sysdeps/powerpc/bits/atomic.h	2007-03-26 22:15:28.000000000 +0200
+++ glibc-2.10.1/sysdeps/powerpc/bits/atomic.h	2009-11-13 00:51:19.000000000 +0100
@@ -85,14 +85,14 @@
       __typeof (*(mem)) __tmp;						      \
       __typeof (mem)  __memp = (mem);					      \
       __asm __volatile (						      \
-		        "1:	lwarx	%0,0,%1" MUTEX_HINT_ACQ "\n"	      \
+		        "1:	lwarx	%0,%y1" MUTEX_HINT_ACQ "\n"	      \
 		        "	cmpw	%0,%2\n"			      \
 		        "	bne	2f\n"				      \
-		        "	stwcx.	%3,0,%1\n"			      \
+		        "	stwcx.	%3,%y1\n"			      \
 		        "	bne-	1b\n"				      \
 		        "2:	" __ARCH_ACQ_INSTR			      \
-		        : "=&r" (__tmp)					      \
-		        : "b" (__memp), "r" (oldval), "r" (newval)	      \
+		        : "=&r" (__tmp), "+Z" (*__memp)			      \
+		        : "r" (oldval), "r" (newval)			      \
 		        : "cr0", "memory");				      \
       __tmp;								      \
   })
@@ -102,14 +102,14 @@
       __typeof (*(mem)) __tmp;						      \
       __typeof (mem)  __memp = (mem);					      \
       __asm __volatile (__ARCH_REL_INSTR "\n"				      \
-		        "1:	lwarx	%0,0,%1" MUTEX_HINT_REL "\n"	      \
+		        "1:	lwarx	%0,%y1" MUTEX_HINT_REL "\n"	      \
 		        "	cmpw	%0,%2\n"			      \
 		        "	bne	2f\n"				      \
-		        "	stwcx.	%3,0,%1\n"			      \
+		        "	stwcx.	%3,%y1\n"			      \
 		        "	bne-	1b\n"				      \
 		        "2:	"					      \
-		        : "=&r" (__tmp)					      \
-		        : "b" (__memp), "r" (oldval), "r" (newval)	      \
+		        : "=&r" (__tmp), "+Z" (*__memp)			      \
+		        : "r" (oldval), "r" (newval)			      \
 		        : "cr0", "memory");				      \
       __tmp;								      \
   })
@@ -118,12 +118,12 @@
   ({									      \
     __typeof (*mem) __val;						      \
     __asm __volatile (							      \
-		      "1:	lwarx	%0,0,%2" MUTEX_HINT_ACQ "\n"	      \
-		      "		stwcx.	%3,0,%2\n"			      \
+		      "1:	lwarx	%0,%y1" MUTEX_HINT_ACQ "\n"	      \
+		      "		stwcx.	%2,%y1\n"			      \
 		      "		bne-	1b\n"				      \
 		      "   " __ARCH_ACQ_INSTR				      \
-		      : "=&r" (__val), "=m" (*mem)			      \
-		      : "b" (mem), "r" (value), "m" (*mem)		      \
+		      : "=&r" (__val), "+Z" (*mem)			      \
+		      : "r" (value)					      \
 		      : "cr0", "memory");				      \
     __val;								      \
   })
@@ -132,11 +132,11 @@
   ({									      \
     __typeof (*mem) __val;						      \
     __asm __volatile (__ARCH_REL_INSTR "\n"				      \
-		      "1:	lwarx	%0,0,%2" MUTEX_HINT_REL "\n"	      \
-		      "		stwcx.	%3,0,%2\n"			      \
+		      "1:	lwarx	%0,%y1" MUTEX_HINT_REL "\n"	      \
+		      "		stwcx.	%2,%y1\n"			      \
 		      "		bne-	1b"				      \
-		      : "=&r" (__val), "=m" (*mem)			      \
-		      : "b" (mem), "r" (value), "m" (*mem)		      \
+		      : "=&r" (__val), "+Z" (*mem)			      \
+		      : "r" (value)					      \
 		      : "cr0", "memory");				      \
     __val;								      \
   })
@@ -144,12 +144,12 @@
 #define __arch_atomic_exchange_and_add_32(mem, value) \
   ({									      \
     __typeof (*mem) __val, __tmp;					      \
-    __asm __volatile ("1:	lwarx	%0,0,%3\n"			      \
-		      "		add	%1,%0,%4\n"			      \
-		      "		stwcx.	%1,0,%3\n"			      \
+    __asm __volatile ("1:	lwarx	%0,%y2\n"			      \
+		      "		add	%1,%0,%3\n"			      \
+		      "		stwcx.	%1,%y2\n"			      \
 		      "		bne-	1b"				      \
-		      : "=&b" (__val), "=&r" (__tmp), "=m" (*mem)	      \
-		      : "b" (mem), "r" (value), "m" (*mem)		      \
+		      : "=&b" (__val), "=&r" (__tmp), "+Z" (*mem)	      \
+		      : "r" (value)					      \
 		      : "cr0", "memory");				      \
     __val;								      \
   })
@@ -157,12 +157,12 @@
 #define __arch_atomic_increment_val_32(mem) \
   ({									      \
     __typeof (*(mem)) __val;						      \
-    __asm __volatile ("1:	lwarx	%0,0,%2\n"			      \
+    __asm __volatile ("1:	lwarx	%0,%y1\n"			      \
 		      "		addi	%0,%0,1\n"			      \
-		      "		stwcx.	%0,0,%2\n"			      \
+		      "		stwcx.	%0,%y1\n"			      \
 		      "		bne-	1b"				      \
-		      : "=&b" (__val), "=m" (*mem)			      \
-		      : "b" (mem), "m" (*mem)				      \
+		      : "=&b" (__val), "+Z" (*mem)			      \
+		      :							      \
 		      : "cr0", "memory");				      \
     __val;								      \
   })
@@ -170,27 +170,27 @@
 #define __arch_atomic_decrement_val_32(mem) \
   ({									      \
     __typeof (*(mem)) __val;						      \
-    __asm __volatile ("1:	lwarx	%0,0,%2\n"			      \
+    __asm __volatile ("1:	lwarx	%0,%y1\n"			      \
 		      "		subi	%0,%0,1\n"			      \
-		      "		stwcx.	%0,0,%2\n"			      \
+		      "		stwcx.	%0,%y1\n"			      \
 		      "		bne-	1b"				      \
-		      : "=&b" (__val), "=m" (*mem)			      \
-		      : "b" (mem), "m" (*mem)				      \
+		      : "=&b" (__val), "+Z" (*mem)			      \
+		      :							      \
 		      : "cr0", "memory");				      \
     __val;								      \
   })
 
 #define __arch_atomic_decrement_if_positive_32(mem) \
   ({ int __val, __tmp;							      \
-     __asm __volatile ("1:	lwarx	%0,0,%3\n"			      \
+     __asm __volatile ("1:	lwarx	%0,%y2\n"			      \
 		       "	cmpwi	0,%0,0\n"			      \
 		       "	addi	%1,%0,-1\n"			      \
 		       "	ble	2f\n"				      \
-		       "	stwcx.	%1,0,%3\n"			      \
+		       "	stwcx.	%1,%y2\n"			      \
 		       "	bne-	1b\n"				      \
 		       "2:	" __ARCH_ACQ_INSTR			      \
-		       : "=&b" (__val), "=&r" (__tmp), "=m" (*mem)	      \
-		       : "b" (mem), "m" (*mem)				      \
+		       : "=&b" (__val), "=&r" (__tmp), "+Z" (*mem)	      \
+		       :						      \
 		       : "cr0", "memory");				      \
      __val;								      \
   })
diff -durN glibc-2.10.1.orig/sysdeps/powerpc/powerpc32/bits/atomic.h glibc-2.10.1/sysdeps/powerpc/powerpc32/bits/atomic.h
--- glibc-2.10.1.orig/sysdeps/powerpc/powerpc32/bits/atomic.h	2007-03-26 22:15:45.000000000 +0200
+++ glibc-2.10.1/sysdeps/powerpc/powerpc32/bits/atomic.h	2009-11-13 00:51:19.000000000 +0100
@@ -44,14 +44,14 @@
 ({									      \
   unsigned int __tmp;							      \
   __asm __volatile (							      \
-		    "1:	lwarx	%0,0,%1" MUTEX_HINT_ACQ "\n"		      \
+		    "1:	lwarx	%0,%y1" MUTEX_HINT_ACQ "\n"		      \
 		    "	subf.	%0,%2,%0\n"				      \
 		    "	bne	2f\n"					      \
-		    "	stwcx.	%3,0,%1\n"				      \
+		    "	stwcx.	%3,%y1\n"				      \
 		    "	bne-	1b\n"					      \
 		    "2:	" __ARCH_ACQ_INSTR				      \
-		    : "=&r" (__tmp)					      \
-		    : "b" (mem), "r" (oldval), "r" (newval)		      \
+		    : "=&r" (__tmp), "+Z" (*(mem))			      \
+		    : "r" (oldval), "r" (newval)			      \
 		    : "cr0", "memory");					      \
   __tmp != 0;								      \
 })
@@ -60,14 +60,14 @@
 ({									      \
   unsigned int __tmp;							      \
   __asm __volatile (__ARCH_REL_INSTR "\n"				      \
-		    "1:	lwarx	%0,0,%1" MUTEX_HINT_REL "\n"		      \
+		    "1:	lwarx	%0,%y1" MUTEX_HINT_REL "\n"		      \
 		    "	subf.	%0,%2,%0\n"				      \
 		    "	bne	2f\n"					      \
-		    "	stwcx.	%3,0,%1\n"				      \
+		    "	stwcx.	%3,%y1\n"				      \
 		    "	bne-	1b\n"					      \
 		    "2:	"						      \
-		    : "=&r" (__tmp)					      \
-		    : "b" (mem), "r" (oldval), "r" (newval)		      \
+		    : "=&r" (__tmp), "+Z" (*(mem))			      \
+		    : "r" (oldval), "r" (newval)			      \
 		    : "cr0", "memory");					      \
   __tmp != 0;								      \
 })
diff -durN glibc-2.10.1.orig/sysdeps/powerpc/powerpc64/bits/atomic.h glibc-2.10.1/sysdeps/powerpc/powerpc64/bits/atomic.h
--- glibc-2.10.1.orig/sysdeps/powerpc/powerpc64/bits/atomic.h	2007-03-26 22:16:03.000000000 +0200
+++ glibc-2.10.1/sysdeps/powerpc/powerpc64/bits/atomic.h	2009-11-13 00:51:19.000000000 +0100
@@ -44,14 +44,14 @@
 ({									      \
   unsigned int __tmp, __tmp2;						      \
   __asm __volatile ("   clrldi  %1,%1,32\n"				      \
-		    "1:	lwarx	%0,0,%2" MUTEX_HINT_ACQ "\n"	 	      \
+		    "1:	lwarx	%0,%y2" MUTEX_HINT_ACQ "\n"	 	      \
 		    "	subf.	%0,%1,%0\n"				      \
 		    "	bne	2f\n"					      \
-		    "	stwcx.	%4,0,%2\n"				      \
+		    "	stwcx.	%4,%y2\n"				      \
 		    "	bne-	1b\n"					      \
 		    "2:	" __ARCH_ACQ_INSTR				      \
-		    : "=&r" (__tmp), "=r" (__tmp2)			      \
-		    : "b" (mem), "1" (oldval), "r" (newval)		      \
+		    : "=&r" (__tmp), "=r" (__tmp2), "+Z" (*(mem))	      \
+		    : "1" (oldval), "r" (newval)			      \
 		    : "cr0", "memory");					      \
   __tmp != 0;								      \
 })
@@ -61,14 +61,14 @@
   unsigned int __tmp, __tmp2;						      \
   __asm __volatile (__ARCH_REL_INSTR "\n"				      \
 		    "   clrldi  %1,%1,32\n"				      \
-		    "1:	lwarx	%0,0,%2" MUTEX_HINT_REL "\n"		      \
+		    "1:	lwarx	%0,%y2" MUTEX_HINT_REL "\n"		      \
 		    "	subf.	%0,%1,%0\n"				      \
 		    "	bne	2f\n"					      \
-		    "	stwcx.	%4,0,%2\n"				      \
+		    "	stwcx.	%4,%y2\n"				      \
 		    "	bne-	1b\n"					      \
 		    "2:	"						      \
-		    : "=&r" (__tmp), "=r" (__tmp2)			      \
-		    : "b" (mem), "1" (oldval), "r" (newval)		      \
+		    : "=&r" (__tmp), "=r" (__tmp2), "+Z" (*(mem))	      \
+		    : "1" (oldval), "r" (newval)			      \
 		    : "cr0", "memory");					      \
   __tmp != 0;								      \
 })
@@ -82,14 +82,14 @@
 ({									      \
   unsigned long	__tmp;							      \
   __asm __volatile (							      \
-		    "1:	ldarx	%0,0,%1" MUTEX_HINT_ACQ "\n"		      \
+		    "1:	ldarx	%0,%y1" MUTEX_HINT_ACQ "\n"		      \
 		    "	subf.	%0,%2,%0\n"				      \
 		    "	bne	2f\n"					      \
-		    "	stdcx.	%3,0,%1\n"				      \
+		    "	stdcx.	%3,%y1\n"				      \
 		    "	bne-	1b\n"					      \
 		    "2:	" __ARCH_ACQ_INSTR				      \
-		    : "=&r" (__tmp)					      \
-		    : "b" (mem), "r" (oldval), "r" (newval)		      \
+		    : "=&r" (__tmp), "+Z" (*(mem))			      \
+		    : "r" (oldval), "r" (newval)			      \
 		    : "cr0", "memory");					      \
   __tmp != 0;								      \
 })
@@ -98,14 +98,14 @@
 ({									      \
   unsigned long	__tmp;							      \
   __asm __volatile (__ARCH_REL_INSTR "\n"				      \
-		    "1:	ldarx	%0,0,%2" MUTEX_HINT_REL "\n"		      \
+		    "1:	ldarx	%0,%y1" MUTEX_HINT_REL "\n"		      \
 		    "	subf.	%0,%2,%0\n"				      \
 		    "	bne	2f\n"					      \
-		    "	stdcx.	%3,0,%1\n"				      \
+		    "	stdcx.	%3,%y1\n"				      \
 		    "	bne-	1b\n"					      \
 		    "2:	"						      \
-		    : "=&r" (__tmp)					      \
-		    : "b" (mem), "r" (oldval), "r" (newval)		      \
+		    : "=&r" (__tmp), "+Z" (*(mem))			      \
+		    : "r" (oldval), "r" (newval)			      \
 		    : "cr0", "memory");					      \
   __tmp != 0;								      \
 })
@@ -115,14 +115,14 @@
       __typeof (*(mem)) __tmp;						      \
       __typeof (mem)  __memp = (mem);					      \
       __asm __volatile (						      \
-		        "1:	ldarx	%0,0,%1" MUTEX_HINT_ACQ "\n"	      \
+		        "1:	ldarx	%0,%y1" MUTEX_HINT_ACQ "\n"	      \
 		        "	cmpd	%0,%2\n"			      \
 		        "	bne	2f\n"				      \
-		        "	stdcx.	%3,0,%1\n"			      \
+		        "	stdcx.	%3,%y1\n"			      \
 		        "	bne-	1b\n"				      \
 		        "2:	" __ARCH_ACQ_INSTR			      \
-		        : "=&r" (__tmp)					      \
-		        : "b" (__memp), "r" (oldval), "r" (newval)	      \
+		        : "=&r" (__tmp), "+Z" (*__memp)			      \
+		        : "r" (oldval), "r" (newval)			      \
 		        : "cr0", "memory");				      \
       __tmp;								      \
   })
@@ -132,14 +132,14 @@
       __typeof (*(mem)) __tmp;						      \
       __typeof (mem)  __memp = (mem);					      \
       __asm __volatile (__ARCH_REL_INSTR "\n"				      \
-		        "1:	ldarx	%0,0,%1" MUTEX_HINT_REL "\n"	      \
+		        "1:	ldarx	%0,%y1" MUTEX_HINT_REL "\n"	      \
 		        "	cmpd	%0,%2\n"			      \
 		        "	bne	2f\n"				      \
-		        "	stdcx.	%3,0,%1\n"			      \
+		        "	stdcx.	%3,%y1\n"			      \
 		        "	bne-	1b\n"				      \
 		        "2:	"					      \
-		        : "=&r" (__tmp)					      \
-		        : "b" (__memp), "r" (oldval), "r" (newval)	      \
+		        : "=&r" (__tmp), "+Z" (*__memp)			      \
+		        : "r" (oldval), "r" (newval)			      \
 		        : "cr0", "memory");				      \
       __tmp;								      \
   })
@@ -148,12 +148,12 @@
     ({									      \
       __typeof (*mem) __val;						      \
       __asm __volatile (__ARCH_REL_INSTR "\n"				      \
-			"1:	ldarx	%0,0,%2" MUTEX_HINT_ACQ "\n"	      \
-			"	stdcx.	%3,0,%2\n"			      \
+			"1:	ldarx	%0,%y1" MUTEX_HINT_ACQ "\n"	      \
+			"	stdcx.	%2,%y1\n"			      \
 			"	bne-	1b\n"				      \
 		  " " __ARCH_ACQ_INSTR					      \
-			: "=&r" (__val), "=m" (*mem)			      \
-			: "b" (mem), "r" (value), "m" (*mem)		      \
+			: "=&r" (__val), "+Z" (*(mem))			      \
+			: "r" (value)					      \
 			: "cr0", "memory");				      \
       __val;								      \
     })
@@ -162,11 +162,11 @@
     ({									      \
       __typeof (*mem) __val;						      \
       __asm __volatile (__ARCH_REL_INSTR "\n"				      \
-			"1:	ldarx	%0,0,%2" MUTEX_HINT_REL "\n"	      \
-			"	stdcx.	%3,0,%2\n"			      \
+			"1:	ldarx	%0,%y1" MUTEX_HINT_REL "\n"	      \
+			"	stdcx.	%2,%y1\n"			      \
 			"	bne-	1b"				      \
-			: "=&r" (__val), "=m" (*mem)			      \
-			: "b" (mem), "r" (value), "m" (*mem)		      \
+			: "=&r" (__val), "+Z" (*(mem))			      \
+			: "r" (value)					      \
 			: "cr0", "memory");				      \
       __val;								      \
     })
@@ -174,12 +174,12 @@
 #define __arch_atomic_exchange_and_add_64(mem, value) \
     ({									      \
       __typeof (*mem) __val, __tmp;					      \
-      __asm __volatile ("1:	ldarx	%0,0,%3\n"			      \
-			"	add	%1,%0,%4\n"			      \
-			"	stdcx.	%1,0,%3\n"			      \
+      __asm __volatile ("1:	ldarx	%0,%y2\n"			      \
+			"	add	%1,%0,%3\n"			      \
+			"	stdcx.	%1,%y2\n"			      \
 			"	bne-	1b"				      \
-			: "=&b" (__val), "=&r" (__tmp), "=m" (*mem)	      \
-			: "b" (mem), "r" (value), "m" (*mem)		      \
+			: "=&b" (__val), "=&r" (__tmp), "+Z" (*(mem))	      \
+			: "r" (value)					      \
 			: "cr0", "memory");				      \
       __val;								      \
     })
@@ -187,12 +187,12 @@
 #define __arch_atomic_increment_val_64(mem) \
     ({									      \
       __typeof (*(mem)) __val;						      \
-      __asm __volatile ("1:	ldarx	%0,0,%2\n"			      \
+      __asm __volatile ("1:	ldarx	%0,%y1\n"			      \
 			"	addi	%0,%0,1\n"			      \
-			"	stdcx.	%0,0,%2\n"			      \
+			"	stdcx.	%0,%y1\n"			      \
 			"	bne-	1b"				      \
-			: "=&b" (__val), "=m" (*mem)			      \
-			: "b" (mem), "m" (*mem)				      \
+			: "=&b" (__val), "+Z" (*(mem))			      \
+			:						      \
 			: "cr0", "memory");				      \
       __val;								      \
     })
@@ -200,27 +200,27 @@
 #define __arch_atomic_decrement_val_64(mem) \
     ({									      \
       __typeof (*(mem)) __val;						      \
-      __asm __volatile ("1:	ldarx	%0,0,%2\n"			      \
+      __asm __volatile ("1:	ldarx	%0,%y1\n"			      \
 			"	subi	%0,%0,1\n"			      \
-			"	stdcx.	%0,0,%2\n"			      \
+			"	stdcx.	%0,%y1\n"			      \
 			"	bne-	1b"				      \
-			: "=&b" (__val), "=m" (*mem)			      \
-			: "b" (mem), "m" (*mem)				      \
+			: "=&b" (__val), "+Z" (*(mem))			      \
+			:						      \
 			: "cr0", "memory");				      \
       __val;								      \
     })
 
 #define __arch_atomic_decrement_if_positive_64(mem) \
   ({ int __val, __tmp;							      \
-     __asm __volatile ("1:	ldarx	%0,0,%3\n"			      \
+     __asm __volatile ("1:	ldarx	%0,%y2\n"			      \
 		       "	cmpdi	0,%0,0\n"			      \
 		       "	addi	%1,%0,-1\n"			      \
 		       "	ble	2f\n"				      \
-		       "	stdcx.	%1,0,%3\n"			      \
+		       "	stdcx.	%1,%y2\n"			      \
 		       "	bne-	1b\n"				      \
 		       "2:	" __ARCH_ACQ_INSTR			      \
-		       : "=&b" (__val), "=&r" (__tmp), "=m" (*mem)	      \
-		       : "b" (mem), "m" (*mem)				      \
+		       : "=&b" (__val), "=&r" (__tmp), "+Z" (*(mem))	      \
+		       :						      \
		       : "cr0", "memory");				      \
      __val;								      \
   })