patches/glibc/2.9/500-ppc-glibc-2.9-atomic.patch
author "Yann E. MORIN" <yann.morin.1998@anciens.enib.fr>
Sun Jan 17 23:06:02 2010 +0100 (2010-01-17)
changeset 1740 c57458bb354d
parent 1201 c9967a6e3b25
permissions -rw-r--r--
configure: do not require hg when configuring in an hg clone

When configuring in an hg clone, we need hg to compute the version string.
It can happen that users do not have Mercurial (eg. if they got a snapshot
rather that they did a full clone). In this case, we can still run, of
course, so simply fill the version string with a sufficiently explicit
value, that does not require hg. The date is a good candidate.
Original patch from: gentoo/src/patchsets/glibc/2.9/6120_all_ppc-glibc-2.9-atomic.patch

-= BEGIN original header =-
sniped from suse

-= END original header =-

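This patch rewrites the PowerPC atomic primitives to describe the memory
location to GCC as a real memory operand instead of a pointer in a base
register: the "b" (mem) / "m" (*mem) operand pairs and the explicit
"0,%N" base+index addressing are replaced by a single read-write
"+Z" (*mem) operand, printed with GCC's PowerPC-specific "%yN" operand
modifier in the indexed form that lwarx/stwcx. (and ldarx/stdcx.)
require.  GCC can then pick the addressing registers itself, and it sees
exactly which memory the asm reads and writes.  As a minimal sketch of
the new idiom (not part of the patch; the helper name is made up here,
and GCC targeting PowerPC is assumed):

    /* Atomically swap *mem with value, returning the old contents.
       "+Z" marks *mem as a read-write memory operand addressable by the
       indexed load-reserve/store-conditional instructions; "%y1" prints
       it as the "rA,rB" form that lwarx/stwcx. expect.  */
    static inline int
    atomic_swap_32 (int *mem, int value)
    {
      int old;
      __asm __volatile ("1:	lwarx	%0,%y1\n"
                        "	stwcx.	%2,%y1\n"
                        "	bne-	1b"
                        : "=&r" (old), "+Z" (*mem)
                        : "r" (value)
                        : "cr0", "memory");
      return old;
    }

The hunks below apply that same transformation to every
load-reserve/store-conditional sequence in the generic, powerpc32 and
powerpc64 atomic headers.
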
diff -durN glibc-2_9.orig/sysdeps/powerpc/bits/atomic.h glibc-2_9/sysdeps/powerpc/bits/atomic.h
--- glibc-2_9.orig/sysdeps/powerpc/bits/atomic.h	2007-03-26 22:15:28.000000000 +0200
+++ glibc-2_9/sysdeps/powerpc/bits/atomic.h	2009-02-02 22:01:40.000000000 +0100
@@ -85,14 +85,14 @@
       __typeof (*(mem)) __tmp;						      \
       __typeof (mem)  __memp = (mem);					      \
       __asm __volatile (						      \
-		        "1:	lwarx	%0,0,%1" MUTEX_HINT_ACQ "\n"	      \
+		        "1:	lwarx	%0,%y1" MUTEX_HINT_ACQ "\n"	      \
 		        "	cmpw	%0,%2\n"			      \
 		        "	bne	2f\n"				      \
-		        "	stwcx.	%3,0,%1\n"			      \
+		        "	stwcx.	%3,%y1\n"			      \
 		        "	bne-	1b\n"				      \
 		        "2:	" __ARCH_ACQ_INSTR			      \
-		        : "=&r" (__tmp)					      \
-		        : "b" (__memp), "r" (oldval), "r" (newval)	      \
+		        : "=&r" (__tmp), "+Z" (*__memp)			      \
+		        : "r" (oldval), "r" (newval)			      \
 		        : "cr0", "memory");				      \
       __tmp;								      \
   })
@@ -102,14 +102,14 @@
       __typeof (*(mem)) __tmp;						      \
       __typeof (mem)  __memp = (mem);					      \
       __asm __volatile (__ARCH_REL_INSTR "\n"				      \
-		        "1:	lwarx	%0,0,%1" MUTEX_HINT_REL "\n"	      \
+		        "1:	lwarx	%0,%y1" MUTEX_HINT_REL "\n"	      \
 		        "	cmpw	%0,%2\n"			      \
 		        "	bne	2f\n"				      \
-		        "	stwcx.	%3,0,%1\n"			      \
+		        "	stwcx.	%3,%y1\n"			      \
 		        "	bne-	1b\n"				      \
 		        "2:	"					      \
-		        : "=&r" (__tmp)					      \
-		        : "b" (__memp), "r" (oldval), "r" (newval)	      \
+		        : "=&r" (__tmp), "+Z" (__memp)			      \
+		        : "r" (oldval), "r" (newval)			      \
 		        : "cr0", "memory");				      \
       __tmp;								      \
   })
@@ -118,12 +118,12 @@
   ({									      \
     __typeof (*mem) __val;						      \
     __asm __volatile (							      \
-		      "1:	lwarx	%0,0,%2" MUTEX_HINT_ACQ "\n"	      \
-		      "		stwcx.	%3,0,%2\n"			      \
+		      "1:	lwarx	%0,%y1" MUTEX_HINT_ACQ "\n"	      \
+		      "		stwcx.	%2,%y1\n"			      \
 		      "		bne-	1b\n"				      \
 		      "   " __ARCH_ACQ_INSTR				      \
-		      : "=&r" (__val), "=m" (*mem)			      \
-		      : "b" (mem), "r" (value), "m" (*mem)		      \
+		      : "=&r" (__val), "+Z" (*mem)			      \
+		      : "r" (value)					      \
 		      : "cr0", "memory");				      \
     __val;								      \
   })
@@ -132,11 +132,11 @@
   ({									      \
     __typeof (*mem) __val;						      \
     __asm __volatile (__ARCH_REL_INSTR "\n"				      \
-		      "1:	lwarx	%0,0,%2" MUTEX_HINT_REL "\n"	      \
-		      "		stwcx.	%3,0,%2\n"			      \
+		      "1:	lwarx	%0,%y1" MUTEX_HINT_REL "\n"	      \
+		      "		stwcx.	%2,%y1\n"			      \
 		      "		bne-	1b"				      \
-		      : "=&r" (__val), "=m" (*mem)			      \
-		      : "b" (mem), "r" (value), "m" (*mem)		      \
+		      : "=&r" (__val), "+Z" (*mem)			      \
+		      : "r" (value)					      \
 		      : "cr0", "memory");				      \
     __val;								      \
   })
@@ -144,12 +144,12 @@
 #define __arch_atomic_exchange_and_add_32(mem, value) \
   ({									      \
     __typeof (*mem) __val, __tmp;					      \
-    __asm __volatile ("1:	lwarx	%0,0,%3\n"			      \
-		      "		add	%1,%0,%4\n"			      \
-		      "		stwcx.	%1,0,%3\n"			      \
+    __asm __volatile ("1:	lwarx	%0,%y2\n"			      \
+		      "		add	%1,%0,%3\n"			      \
+		      "		stwcx.	%1,%y2\n"			      \
 		      "		bne-	1b"				      \
-		      : "=&b" (__val), "=&r" (__tmp), "=m" (*mem)	      \
-		      : "b" (mem), "r" (value), "m" (*mem)		      \
+		      : "=&b" (__val), "=&r" (__tmp), "+Z" (*mem)	      \
+		      : "r" (value)					      \
 		      : "cr0", "memory");				      \
     __val;								      \
   })
@@ -157,12 +157,12 @@
 #define __arch_atomic_increment_val_32(mem) \
   ({									      \
     __typeof (*(mem)) __val;						      \
-    __asm __volatile ("1:	lwarx	%0,0,%2\n"			      \
+    __asm __volatile ("1:	lwarx	%0,%y1\n"			      \
 		      "		addi	%0,%0,1\n"			      \
-		      "		stwcx.	%0,0,%2\n"			      \
+		      "		stwcx.	%0,%y1\n"			      \
 		      "		bne-	1b"				      \
-		      : "=&b" (__val), "=m" (*mem)			      \
-		      : "b" (mem), "m" (*mem)				      \
+		      : "=&b" (__val), "+Z" (*mem)			      \
+		      :							      \
 		      : "cr0", "memory");				      \
     __val;								      \
   })
@@ -170,27 +170,27 @@
 #define __arch_atomic_decrement_val_32(mem) \
   ({									      \
     __typeof (*(mem)) __val;						      \
-    __asm __volatile ("1:	lwarx	%0,0,%2\n"			      \
+    __asm __volatile ("1:	lwarx	%0,%y1\n"			      \
 		      "		subi	%0,%0,1\n"			      \
-		      "		stwcx.	%0,0,%2\n"			      \
+		      "		stwcx.	%0,%y1\n"			      \
 		      "		bne-	1b"				      \
-		      : "=&b" (__val), "=m" (*mem)			      \
-		      : "b" (mem), "m" (*mem)				      \
+		      : "=&b" (__val), "+Z" (*mem)			      \
+		      :							      \
 		      : "cr0", "memory");				      \
     __val;								      \
   })
 
 #define __arch_atomic_decrement_if_positive_32(mem) \
   ({ int __val, __tmp;							      \
-     __asm __volatile ("1:	lwarx	%0,0,%3\n"			      \
+     __asm __volatile ("1:	lwarx	%0,%y2\n"			      \
 		       "	cmpwi	0,%0,0\n"			      \
 		       "	addi	%1,%0,-1\n"			      \
 		       "	ble	2f\n"				      \
-		       "	stwcx.	%1,0,%3\n"			      \
+		       "	stwcx.	%1,%y2\n"			      \
 		       "	bne-	1b\n"				      \
 		       "2:	" __ARCH_ACQ_INSTR			      \
-		       : "=&b" (__val), "=&r" (__tmp), "=m" (*mem)	      \
-		       : "b" (mem), "m" (*mem)				      \
+		       : "=&b" (__val), "=&r" (__tmp), "+Z" (*mem)	      \
+		       :						      \
 		       : "cr0", "memory");				      \
      __val;								      \
   })
diff -durN glibc-2_9.orig/sysdeps/powerpc/powerpc32/bits/atomic.h glibc-2_9/sysdeps/powerpc/powerpc32/bits/atomic.h
--- glibc-2_9.orig/sysdeps/powerpc/powerpc32/bits/atomic.h	2007-03-26 22:15:45.000000000 +0200
+++ glibc-2_9/sysdeps/powerpc/powerpc32/bits/atomic.h	2009-02-02 22:01:40.000000000 +0100
@@ -44,14 +44,14 @@
 ({									      \
   unsigned int __tmp;							      \
   __asm __volatile (							      \
-		    "1:	lwarx	%0,0,%1" MUTEX_HINT_ACQ "\n"		      \
+		    "1:	lwarx	%0,%y1" MUTEX_HINT_ACQ "\n"		      \
 		    "	subf.	%0,%2,%0\n"				      \
 		    "	bne	2f\n"					      \
-		    "	stwcx.	%3,0,%1\n"				      \
+		    "	stwcx.	%3,%y1\n"				      \
 		    "	bne-	1b\n"					      \
 		    "2:	" __ARCH_ACQ_INSTR				      \
-		    : "=&r" (__tmp)					      \
-		    : "b" (mem), "r" (oldval), "r" (newval)		      \
+		    : "=&r" (__tmp), "+Z" (*(mem))			      \
+		    : "r" (oldval), "r" (newval)			      \
 		    : "cr0", "memory");					      \
   __tmp != 0;								      \
 })
@@ -60,14 +60,14 @@
 ({									      \
   unsigned int __tmp;							      \
   __asm __volatile (__ARCH_REL_INSTR "\n"				      \
-		    "1:	lwarx	%0,0,%1" MUTEX_HINT_REL "\n"		      \
+		    "1:	lwarx	%0,%y1" MUTEX_HINT_REL "\n"		      \
 		    "	subf.	%0,%2,%0\n"				      \
 		    "	bne	2f\n"					      \
-		    "	stwcx.	%3,0,%1\n"				      \
+		    "	stwcx.	%3,%y1\n"				      \
 		    "	bne-	1b\n"					      \
 		    "2:	"						      \
-		    : "=&r" (__tmp)					      \
-		    : "b" (mem), "r" (oldval), "r" (newval)		      \
+		    : "=&r" (__tmp), "+Z" (*(mem))			      \
+		    : "r" (oldval), "r" (newval)			      \
 		    : "cr0", "memory");					      \
   __tmp != 0;								      \
 })
diff -durN glibc-2_9.orig/sysdeps/powerpc/powerpc64/bits/atomic.h glibc-2_9/sysdeps/powerpc/powerpc64/bits/atomic.h
--- glibc-2_9.orig/sysdeps/powerpc/powerpc64/bits/atomic.h	2007-03-26 22:16:03.000000000 +0200
+++ glibc-2_9/sysdeps/powerpc/powerpc64/bits/atomic.h	2009-02-02 22:01:40.000000000 +0100
@@ -44,14 +44,14 @@
 ({									      \
   unsigned int __tmp, __tmp2;						      \
   __asm __volatile ("   clrldi  %1,%1,32\n"				      \
-		    "1:	lwarx	%0,0,%2" MUTEX_HINT_ACQ "\n"	 	      \
+		    "1:	lwarx	%0,%y2" MUTEX_HINT_ACQ "\n"	 	      \
 		    "	subf.	%0,%1,%0\n"				      \
 		    "	bne	2f\n"					      \
-		    "	stwcx.	%4,0,%2\n"				      \
+		    "	stwcx.	%4,%y2\n"				      \
 		    "	bne-	1b\n"					      \
 		    "2:	" __ARCH_ACQ_INSTR				      \
-		    : "=&r" (__tmp), "=r" (__tmp2)			      \
-		    : "b" (mem), "1" (oldval), "r" (newval)		      \
+		    : "=&r" (__tmp), "=r" (__tmp2), "+Z" (*(mem))	      \
+		    : "1" (oldval), "r" (newval)			      \
 		    : "cr0", "memory");					      \
   __tmp != 0;								      \
 })
@@ -61,14 +61,14 @@
   unsigned int __tmp, __tmp2;						      \
   __asm __volatile (__ARCH_REL_INSTR "\n"				      \
 		    "   clrldi  %1,%1,32\n"				      \
-		    "1:	lwarx	%0,0,%2" MUTEX_HINT_REL "\n"		      \
+		    "1:	lwarx	%0,%y2" MUTEX_HINT_REL "\n"		      \
 		    "	subf.	%0,%1,%0\n"				      \
 		    "	bne	2f\n"					      \
-		    "	stwcx.	%4,0,%2\n"				      \
+		    "	stwcx.	%4,%y2\n"				      \
 		    "	bne-	1b\n"					      \
 		    "2:	"						      \
-		    : "=&r" (__tmp), "=r" (__tmp2)			      \
-		    : "b" (mem), "1" (oldval), "r" (newval)		      \
+		    : "=&r" (__tmp), "=r" (__tmp2), "+Z" (*(mem))	      \
+		    : "1" (oldval), "r" (newval)			      \
 		    : "cr0", "memory");					      \
   __tmp != 0;								      \
 })
@@ -82,14 +82,14 @@
 ({									      \
   unsigned long	__tmp;							      \
   __asm __volatile (							      \
-		    "1:	ldarx	%0,0,%1" MUTEX_HINT_ACQ "\n"		      \
+		    "1:	ldarx	%0,%y1" MUTEX_HINT_ACQ "\n"		      \
 		    "	subf.	%0,%2,%0\n"				      \
 		    "	bne	2f\n"					      \
-		    "	stdcx.	%3,0,%1\n"				      \
+		    "	stdcx.	%3,%y1\n"				      \
 		    "	bne-	1b\n"					      \
 		    "2:	" __ARCH_ACQ_INSTR				      \
-		    : "=&r" (__tmp)					      \
-		    : "b" (mem), "r" (oldval), "r" (newval)		      \
+		    : "=&r" (__tmp), "+Z" (*(mem))			      \
+		    : "r" (oldval), "r" (newval)			      \
 		    : "cr0", "memory");					      \
   __tmp != 0;								      \
 })
@@ -98,14 +98,14 @@
 ({									      \
   unsigned long	__tmp;							      \
   __asm __volatile (__ARCH_REL_INSTR "\n"				      \
-		    "1:	ldarx	%0,0,%2" MUTEX_HINT_REL "\n"		      \
+		    "1:	ldarx	%0,%y1" MUTEX_HINT_REL "\n"		      \
 		    "	subf.	%0,%2,%0\n"				      \
 		    "	bne	2f\n"					      \
-		    "	stdcx.	%3,0,%1\n"				      \
+		    "	stdcx.	%3,%y1\n"				      \
 		    "	bne-	1b\n"					      \
 		    "2:	"						      \
-		    : "=&r" (__tmp)					      \
-		    : "b" (mem), "r" (oldval), "r" (newval)		      \
+		    : "=&r" (__tmp), "+Z" (*(mem))			      \
+		    : "r" (oldval), "r" (newval)			      \
 		    : "cr0", "memory");					      \
   __tmp != 0;								      \
 })
@@ -115,14 +115,14 @@
       __typeof (*(mem)) __tmp;						      \
       __typeof (mem)  __memp = (mem);					      \
       __asm __volatile (						      \
-		        "1:	ldarx	%0,0,%1" MUTEX_HINT_ACQ "\n"	      \
+		        "1:	ldarx	%0,%y1" MUTEX_HINT_ACQ "\n"	      \
 		        "	cmpd	%0,%2\n"			      \
 		        "	bne	2f\n"				      \
-		        "	stdcx.	%3,0,%1\n"			      \
+		        "	stdcx.	%3,%y1\n"			      \
 		        "	bne-	1b\n"				      \
 		        "2:	" __ARCH_ACQ_INSTR			      \
-		        : "=&r" (__tmp)					      \
-		        : "b" (__memp), "r" (oldval), "r" (newval)	      \
+		        : "=&r" (__tmp), "+Z" (*__memp)			      \
+		        : "r" (oldval), "r" (newval)			      \
 		        : "cr0", "memory");				      \
       __tmp;								      \
   })
@@ -132,14 +132,14 @@
       __typeof (*(mem)) __tmp;						      \
       __typeof (mem)  __memp = (mem);					      \
       __asm __volatile (__ARCH_REL_INSTR "\n"				      \
-		        "1:	ldarx	%0,0,%1" MUTEX_HINT_REL "\n"	      \
+		        "1:	ldarx	%0,%y1" MUTEX_HINT_REL "\n"	      \
 		        "	cmpd	%0,%2\n"			      \
 		        "	bne	2f\n"				      \
-		        "	stdcx.	%3,0,%1\n"			      \
+		        "	stdcx.	%3,%y1\n"			      \
 		        "	bne-	1b\n"				      \
 		        "2:	"					      \
-		        : "=&r" (__tmp)					      \
-		        : "b" (__memp), "r" (oldval), "r" (newval)	      \
+		        : "=&r" (__tmp), "+Z" (*__memp)			      \
+		        : "r" (oldval), "r" (newval)			      \
 		        : "cr0", "memory");				      \
       __tmp;								      \
   })
@@ -148,12 +148,12 @@
     ({									      \
       __typeof (*mem) __val;						      \
       __asm __volatile (__ARCH_REL_INSTR "\n"				      \
-			"1:	ldarx	%0,0,%2" MUTEX_HINT_ACQ "\n"	      \
-			"	stdcx.	%3,0,%2\n"			      \
+			"1:	ldarx	%0,%y1" MUTEX_HINT_ACQ "\n"	      \
+			"	stdcx.	%2,%y1\n"			      \
 			"	bne-	1b\n"				      \
 		  " " __ARCH_ACQ_INSTR					      \
-			: "=&r" (__val), "=m" (*mem)			      \
-			: "b" (mem), "r" (value), "m" (*mem)		      \
+			: "=&r" (__val), "+Z" (*(mem))			      \
+			: "r" (value)					      \
 			: "cr0", "memory");				      \
       __val;								      \
     })
@@ -162,11 +162,11 @@
     ({									      \
       __typeof (*mem) __val;						      \
       __asm __volatile (__ARCH_REL_INSTR "\n"				      \
-			"1:	ldarx	%0,0,%2" MUTEX_HINT_REL "\n"	      \
-			"	stdcx.	%3,0,%2\n"			      \
+			"1:	ldarx	%0,%y1" MUTEX_HINT_REL "\n"	      \
+			"	stdcx.	%2,%y1\n"			      \
 			"	bne-	1b"				      \
-			: "=&r" (__val), "=m" (*mem)			      \
-			: "b" (mem), "r" (value), "m" (*mem)		      \
+			: "=&r" (__val), "+Z" (*(mem))			      \
+			: "r" (value)					      \
 			: "cr0", "memory");				      \
       __val;								      \
     })
@@ -174,12 +174,12 @@
 #define __arch_atomic_exchange_and_add_64(mem, value) \
     ({									      \
       __typeof (*mem) __val, __tmp;					      \
-      __asm __volatile ("1:	ldarx	%0,0,%3\n"			      \
-			"	add	%1,%0,%4\n"			      \
-			"	stdcx.	%1,0,%3\n"			      \
+      __asm __volatile ("1:	ldarx	%0,%y2\n"			      \
+			"	add	%1,%0,%3\n"			      \
+			"	stdcx.	%1,%y2\n"			      \
 			"	bne-	1b"				      \
-			: "=&b" (__val), "=&r" (__tmp), "=m" (*mem)	      \
-			: "b" (mem), "r" (value), "m" (*mem)		      \
+			: "=&b" (__val), "=&r" (__tmp), "+Z" (*(mem))	      \
+			: "r" (value)					      \
 			: "cr0", "memory");				      \
       __val;								      \
     })
@@ -187,12 +187,12 @@
 #define __arch_atomic_increment_val_64(mem) \
     ({									      \
       __typeof (*(mem)) __val;						      \
-      __asm __volatile ("1:	ldarx	%0,0,%2\n"			      \
+      __asm __volatile ("1:	ldarx	%0,%y1\n"			      \
 			"	addi	%0,%0,1\n"			      \
-			"	stdcx.	%0,0,%2\n"			      \
+			"	stdcx.	%0,%y1\n"			      \
 			"	bne-	1b"				      \
-			: "=&b" (__val), "=m" (*mem)			      \
-			: "b" (mem), "m" (*mem)				      \
+			: "=&b" (__val), "+Z" (*(mem))			      \
+			:						      \
 			: "cr0", "memory");				      \
       __val;								      \
     })
@@ -200,27 +200,27 @@
 #define __arch_atomic_decrement_val_64(mem) \
     ({									      \
       __typeof (*(mem)) __val;						      \
-      __asm __volatile ("1:	ldarx	%0,0,%2\n"			      \
+      __asm __volatile ("1:	ldarx	%0,%y1\n"			      \
 			"	subi	%0,%0,1\n"			      \
-			"	stdcx.	%0,0,%2\n"			      \
+			"	stdcx.	%0,%y1\n"			      \
 			"	bne-	1b"				      \
-			: "=&b" (__val), "=m" (*mem)			      \
-			: "b" (mem), "m" (*mem)				      \
+			: "=&b" (__val), "+Z" (*(mem))			      \
+			:						      \
 			: "cr0", "memory");				      \
       __val;								      \
     })
 
 #define __arch_atomic_decrement_if_positive_64(mem) \
   ({ int __val, __tmp;							      \
-     __asm __volatile ("1:	ldarx	%0,0,%3\n"			      \
+     __asm __volatile ("1:	ldarx	%0,%y2\n"			      \
 		       "	cmpdi	0,%0,0\n"			      \
 		       "	addi	%1,%0,-1\n"			      \
 		       "	ble	2f\n"				      \
-		       "	stdcx.	%1,0,%3\n"			      \
+		       "	stdcx.	%1,%y2\n"			      \
 		       "	bne-	1b\n"				      \
 		       "2:	" __ARCH_ACQ_INSTR			      \
-		       : "=&b" (__val), "=&r" (__tmp), "=m" (*mem)	      \
-		       : "b" (mem), "m" (*mem)				      \
+		       : "=&b" (__val), "=&r" (__tmp), "+Z" (*(mem))	      \
+		       :						      \
		       : "cr0", "memory");				      \
      __val;								      \
   })