Add a patchset for glibc 2_9, vampirised from the Gentoo repository.
/trunk/patches/glibc/2_9/100-ssp-compat.patch | 193 +++++
/trunk/patches/glibc/2_9/110-respect-env-CPPFLAGS.patch | 30 +
/trunk/patches/glibc/2_9/120-_nss_dns_gethostbyaddr2_r-check-and-adjust-the-buffer-alignment.patch | 35 +
/trunk/patches/glibc/2_9/130-add_prio-macros.patch | 26 +
/trunk/patches/glibc/2_9/140-regex-BZ9697.patch | 111 +++
/trunk/patches/glibc/2_9/150-regex-BZ697.patch | 28 +
/trunk/patches/glibc/2_9/160-i386-x86_64-revert-clone-cfi.patch | 53 +
/trunk/patches/glibc/2_9/170-2.10-dns-no-gethostbyname4.patch | 35 +
/trunk/patches/glibc/2_9/180-math-tests.patch | 72 ++
/trunk/patches/glibc/2_9/190-queue-header-updates.patch | 89 ++
/trunk/patches/glibc/2_9/200-awk-in-C-locale.patch | 23 +
/trunk/patches/glibc/2_9/210-2.9-strlen-hack.patch | 109 +++
/trunk/patches/glibc/2_9/220-manual-no-perl.patch | 29 +
/trunk/patches/glibc/2_9/230-2.3.3-localedef-fix-trampoline.patch | 74 ++
/trunk/patches/glibc/2_9/240-i386-LOAD_PIC_REG.patch | 23 +
/trunk/patches/glibc/2_9/250-resolv-dynamic.patch | 44 +
/trunk/patches/glibc/2_9/260-fadvise64_64.patch | 30 +
/trunk/patches/glibc/2_9/270-ldbl-nexttowardf.patch | 68 ++
/trunk/patches/glibc/2_9/280-section-comments.patch | 29 +
/trunk/patches/glibc/2_9/290-no-inline-gmon.patch | 38 +
/trunk/patches/glibc/2_9/300-2.9-check_native-headers.patch | 22 +
/trunk/patches/glibc/2_9/310-2.3.6-fix-pr631.patch | 50 +
/trunk/patches/glibc/2_9/320-2.9-assume-pipe2.patch | 59 +
/trunk/patches/glibc/2_9/330-2.3.3-china.patch | 35 +
/trunk/patches/glibc/2_9/340-new-valencian-locale.patch | 120 +++
/trunk/patches/glibc/2_9/350-2.4-undefine-__i686.patch | 47 +
/trunk/patches/glibc/2_9/360-2.8-nscd-one-fork.patch | 45 +
/trunk/patches/glibc/2_9/370-hppa_glibc-2.7-hppa-nptl-carlos.patch | 249 ++++++
/trunk/patches/glibc/2_9/380-2.3.6-dl_execstack-PaX-support.patch | 71 ++
/trunk/patches/glibc/2_9/390-2.3.3_pre20040117-pt_pax.patch | 35 +
/trunk/patches/glibc/2_9/400-tests-sandbox-libdl-paths.patch | 198 +++++
/trunk/patches/glibc/2_9/410-2.9-fnmatch.patch | 64 ++
/trunk/patches/glibc/2_9/420-dont-build-timezone.patch | 19 +
/trunk/patches/glibc/2_9/430-2.7-cross-compile-nptl.patch | 57 +
/trunk/patches/glibc/2_9/440-alpha-glibc-2.4-xstat.patch | 249 ++++++
/trunk/patches/glibc/2_9/450-alpha-glibc-2.5-no-page-header.patch | 32 +
/trunk/patches/glibc/2_9/460-alpha-glibc-2.5-no-asm-elf-header.patch | 38 +
/trunk/patches/glibc/2_9/470-alpha-glibc-2.8-creat.patch | 19 +
/trunk/patches/glibc/2_9/480-alpha-glibc-2.8-cache-shape.patch | 18 +
/trunk/patches/glibc/2_9/490-ptr-mangling.patch | 114 +++
/trunk/patches/glibc/2_9/500-ppc-glibc-2.9-atomic.patch | 414 ++++++++++
41 files changed, 3094 insertions(+)
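
The bulk of the set is 500-ppc-glibc-2.9-atomic.patch, reproduced below. It reworks the PowerPC atomic macros so that each lwarx/stwcx. (ldarx/stdcx. on powerpc64) pair addresses memory through a single "+Z" read-write memory operand printed with the "%y" operand modifier, instead of a "b" (base-register) input with a hard-coded 0 index plus a redundant "=m"/"m" operand pair. GCC is then free to pick the indexed addressing form itself, and it knows the location is both read and written.

A minimal sketch of the new constraint style, as a hypothetical stand-alone helper (GCC targeting PowerPC only; not part of the patch):

    /* Atomically increment *mem and return the new value.
       "%y1" prints the "Z"-constrained memory operand in the
       "base,index" (or "0,reg") form that lwarx/stwcx. expect,
       and "+Z" marks *mem as read-write.  "=&b" keeps the result
       out of r0, which addi would read as the constant 0.  */
    static inline int
    atomic_increment_val (int *mem)
    {
      int val;
      __asm__ __volatile__ ("1:  lwarx   %0,%y1\n"
                            "    addi    %0,%0,1\n"
                            "    stwcx.  %0,%y1\n"
                            "    bne-    1b"
                            : "=&b" (val), "+Z" (*mem)
                            :
                            : "cr0", "memory");
      return val;
    }
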
Original patch from: gentoo/src/patchsets/glibc/2.9/6120_all_ppc-glibc-2.9-atomic.patch
-= BEGIN original header =-
-= END original header =-
diff -durN glibc-2_9.orig/sysdeps/powerpc/bits/atomic.h glibc-2_9/sysdeps/powerpc/bits/atomic.h
--- glibc-2_9.orig/sysdeps/powerpc/bits/atomic.h 2007-03-26 22:15:28.000000000 +0200
+++ glibc-2_9/sysdeps/powerpc/bits/atomic.h 2009-02-02 22:01:40.000000000 +0100
__typeof (*(mem)) __tmp; \
__typeof (mem) __memp = (mem); \
- "1: lwarx %0,0,%1" MUTEX_HINT_ACQ "\n" \
+ "1: lwarx %0,%y1" MUTEX_HINT_ACQ "\n" \
- " stwcx. %3,0,%1\n" \
+ " stwcx. %3,%y1\n" \
"2: " __ARCH_ACQ_INSTR \
- : "b" (__memp), "r" (oldval), "r" (newval) \
+ : "=&r" (__tmp), "+Z" (*__memp) \
+ : "r" (oldval), "r" (newval) \
__typeof (*(mem)) __tmp; \
__typeof (mem) __memp = (mem); \
__asm __volatile (__ARCH_REL_INSTR "\n" \
- "1: lwarx %0,0,%1" MUTEX_HINT_REL "\n" \
+ "1: lwarx %0,%y1" MUTEX_HINT_REL "\n" \
- " stwcx. %3,0,%1\n" \
+ " stwcx. %3,%y1\n" \
- : "b" (__memp), "r" (oldval), "r" (newval) \
44 + : "=&r" (__tmp), "+Z" (__memp) \
45 + : "r" (oldval), "r" (newval) \
51 __typeof (*mem) __val; \
53 - "1: lwarx %0,0,%2" MUTEX_HINT_ACQ "\n" \
54 - " stwcx. %3,0,%2\n" \
55 + "1: lwarx %0,%y1" MUTEX_HINT_ACQ "\n" \
56 + " stwcx. %2,%y1\n" \
58 " " __ARCH_ACQ_INSTR \
59 - : "=&r" (__val), "=m" (*mem) \
60 - : "b" (mem), "r" (value), "m" (*mem) \
61 + : "=&r" (__val), "+Z" (*mem) \
68 __typeof (*mem) __val; \
69 __asm __volatile (__ARCH_REL_INSTR "\n" \
70 - "1: lwarx %0,0,%2" MUTEX_HINT_REL "\n" \
71 - " stwcx. %3,0,%2\n" \
72 + "1: lwarx %0,%y1" MUTEX_HINT_REL "\n" \
73 + " stwcx. %2,%y1\n" \
75 - : "=&r" (__val), "=m" (*mem) \
76 - : "b" (mem), "r" (value), "m" (*mem) \
77 + : "=&r" (__val), "+Z" (*mem) \
83 #define __arch_atomic_exchange_and_add_32(mem, value) \
85 __typeof (*mem) __val, __tmp; \
86 - __asm __volatile ("1: lwarx %0,0,%3\n" \
88 - " stwcx. %1,0,%3\n" \
89 + __asm __volatile ("1: lwarx %0,%y2\n" \
91 + " stwcx. %1,%y2\n" \
93 - : "=&b" (__val), "=&r" (__tmp), "=m" (*mem) \
94 - : "b" (mem), "r" (value), "m" (*mem) \
95 + : "=&b" (__val), "=&r" (__tmp), "+Z" (*mem) \
100 @@ -157,12 +157,12 @@
101 #define __arch_atomic_increment_val_32(mem) \
103 __typeof (*(mem)) __val; \
104 - __asm __volatile ("1: lwarx %0,0,%2\n" \
105 + __asm __volatile ("1: lwarx %0,%y1\n" \
107 - " stwcx. %0,0,%2\n" \
108 + " stwcx. %0,%y1\n" \
110 - : "=&b" (__val), "=m" (*mem) \
111 - : "b" (mem), "m" (*mem) \
112 + : "=&b" (__val), "+Z" (*mem) \
114 : "cr0", "memory"); \
117 @@ -170,27 +170,27 @@
118 #define __arch_atomic_decrement_val_32(mem) \
120 __typeof (*(mem)) __val; \
121 - __asm __volatile ("1: lwarx %0,0,%2\n" \
122 + __asm __volatile ("1: lwarx %0,%y1\n" \
124 - " stwcx. %0,0,%2\n" \
125 + " stwcx. %0,%y1\n" \
127 - : "=&b" (__val), "=m" (*mem) \
128 - : "b" (mem), "m" (*mem) \
129 + : "=&b" (__val), "+Z" (*mem) \
131 : "cr0", "memory"); \
135 #define __arch_atomic_decrement_if_positive_32(mem) \
136 ({ int __val, __tmp; \
137 - __asm __volatile ("1: lwarx %0,0,%3\n" \
138 + __asm __volatile ("1: lwarx %0,%y2\n" \
142 - " stwcx. %1,0,%3\n" \
143 + " stwcx. %1,%y2\n" \
145 "2: " __ARCH_ACQ_INSTR \
146 - : "=&b" (__val), "=&r" (__tmp), "=m" (*mem) \
147 - : "b" (mem), "m" (*mem) \
148 + : "=&b" (__val), "=&r" (__tmp), "+Z" (*mem) \
150 : "cr0", "memory"); \
153 diff -durN glibc-2_9.orig/sysdeps/powerpc/powerpc32/bits/atomic.h glibc-2_9/sysdeps/powerpc/powerpc32/bits/atomic.h
154 --- glibc-2_9.orig/sysdeps/powerpc/powerpc32/bits/atomic.h 2007-03-26 22:15:45.000000000 +0200
155 +++ glibc-2_9/sysdeps/powerpc/powerpc32/bits/atomic.h 2009-02-02 22:01:40.000000000 +0100
158 unsigned int __tmp; \
160 - "1: lwarx %0,0,%1" MUTEX_HINT_ACQ "\n" \
161 + "1: lwarx %0,%y1" MUTEX_HINT_ACQ "\n" \
162 " subf. %0,%2,%0\n" \
164 - " stwcx. %3,0,%1\n" \
165 + " stwcx. %3,%y1\n" \
167 "2: " __ARCH_ACQ_INSTR \
169 - : "b" (mem), "r" (oldval), "r" (newval) \
170 + : "=&r" (__tmp), "+Z" (*(mem)) \
171 + : "r" (oldval), "r" (newval) \
172 : "cr0", "memory"); \
177 unsigned int __tmp; \
178 __asm __volatile (__ARCH_REL_INSTR "\n" \
179 - "1: lwarx %0,0,%1" MUTEX_HINT_REL "\n" \
180 + "1: lwarx %0,%y1" MUTEX_HINT_REL "\n" \
181 " subf. %0,%2,%0\n" \
183 - " stwcx. %3,0,%1\n" \
184 + " stwcx. %3,%y1\n" \
188 - : "b" (mem), "r" (oldval), "r" (newval) \
189 + : "=&r" (__tmp), "+Z" (*(mem)) \
190 + : "r" (oldval), "r" (newval) \
191 : "cr0", "memory"); \
194 diff -durN glibc-2_9.orig/sysdeps/powerpc/powerpc64/bits/atomic.h glibc-2_9/sysdeps/powerpc/powerpc64/bits/atomic.h
195 --- glibc-2_9.orig/sysdeps/powerpc/powerpc64/bits/atomic.h 2007-03-26 22:16:03.000000000 +0200
196 +++ glibc-2_9/sysdeps/powerpc/powerpc64/bits/atomic.h 2009-02-02 22:01:40.000000000 +0100
199 unsigned int __tmp, __tmp2; \
200 __asm __volatile (" clrldi %1,%1,32\n" \
201 - "1: lwarx %0,0,%2" MUTEX_HINT_ACQ "\n" \
202 + "1: lwarx %0,%y2" MUTEX_HINT_ACQ "\n" \
203 " subf. %0,%1,%0\n" \
205 - " stwcx. %4,0,%2\n" \
206 + " stwcx. %4,%y2\n" \
208 "2: " __ARCH_ACQ_INSTR \
209 - : "=&r" (__tmp), "=r" (__tmp2) \
210 - : "b" (mem), "1" (oldval), "r" (newval) \
211 + : "=&r" (__tmp), "=r" (__tmp2), "+Z" (*(mem)) \
212 + : "1" (oldval), "r" (newval) \
213 : "cr0", "memory"); \
217 unsigned int __tmp, __tmp2; \
218 __asm __volatile (__ARCH_REL_INSTR "\n" \
219 " clrldi %1,%1,32\n" \
220 - "1: lwarx %0,0,%2" MUTEX_HINT_REL "\n" \
221 + "1: lwarx %0,%y2" MUTEX_HINT_REL "\n" \
222 " subf. %0,%1,%0\n" \
224 - " stwcx. %4,0,%2\n" \
225 + " stwcx. %4,%y2\n" \
228 - : "=&r" (__tmp), "=r" (__tmp2) \
229 - : "b" (mem), "1" (oldval), "r" (newval) \
230 + : "=&r" (__tmp), "=r" (__tmp2), "+Z" (*(mem)) \
231 + : "1" (oldval), "r" (newval) \
232 : "cr0", "memory"); \
237 unsigned long __tmp; \
239 - "1: ldarx %0,0,%1" MUTEX_HINT_ACQ "\n" \
240 + "1: ldarx %0,%y1" MUTEX_HINT_ACQ "\n" \
241 " subf. %0,%2,%0\n" \
243 - " stdcx. %3,0,%1\n" \
244 + " stdcx. %3,%y1\n" \
246 "2: " __ARCH_ACQ_INSTR \
248 - : "b" (mem), "r" (oldval), "r" (newval) \
249 + : "=&r" (__tmp), "+Z" (*(mem)) \
250 + : "r" (oldval), "r" (newval) \
251 : "cr0", "memory"); \
256 unsigned long __tmp; \
257 __asm __volatile (__ARCH_REL_INSTR "\n" \
258 - "1: ldarx %0,0,%2" MUTEX_HINT_REL "\n" \
259 + "1: ldarx %0,%y1" MUTEX_HINT_REL "\n" \
260 " subf. %0,%2,%0\n" \
262 - " stdcx. %3,0,%1\n" \
263 + " stdcx. %3,%y1\n" \
267 - : "b" (mem), "r" (oldval), "r" (newval) \
268 + : "=&r" (__tmp), "+Z" (*(mem)) \
269 + : "r" (oldval), "r" (newval) \
270 : "cr0", "memory"); \
273 @@ -115,14 +115,14 @@
274 __typeof (*(mem)) __tmp; \
275 __typeof (mem) __memp = (mem); \
277 - "1: ldarx %0,0,%1" MUTEX_HINT_ACQ "\n" \
278 + "1: ldarx %0,%y1" MUTEX_HINT_ACQ "\n" \
281 - " stdcx. %3,0,%1\n" \
282 + " stdcx. %3,%y1\n" \
284 "2: " __ARCH_ACQ_INSTR \
286 - : "b" (__memp), "r" (oldval), "r" (newval) \
287 + : "=&r" (__tmp), "+Z" (*__memp) \
288 + : "r" (oldval), "r" (newval) \
289 : "cr0", "memory"); \
292 @@ -132,14 +132,14 @@
293 __typeof (*(mem)) __tmp; \
294 __typeof (mem) __memp = (mem); \
295 __asm __volatile (__ARCH_REL_INSTR "\n" \
296 - "1: ldarx %0,0,%1" MUTEX_HINT_REL "\n" \
297 + "1: ldarx %0,%y1" MUTEX_HINT_REL "\n" \
300 - " stdcx. %3,0,%1\n" \
301 + " stdcx. %3,%y1\n" \
305 - : "b" (__memp), "r" (oldval), "r" (newval) \
306 + : "=&r" (__tmp), "+Z" (*__memp) \
307 + : "r" (oldval), "r" (newval) \
308 : "cr0", "memory"); \
311 @@ -148,12 +148,12 @@
313 __typeof (*mem) __val; \
314 __asm __volatile (__ARCH_REL_INSTR "\n" \
315 - "1: ldarx %0,0,%2" MUTEX_HINT_ACQ "\n" \
316 - " stdcx. %3,0,%2\n" \
317 + "1: ldarx %0,%y1" MUTEX_HINT_ACQ "\n" \
318 + " stdcx. %2,%y1\n" \
320 " " __ARCH_ACQ_INSTR \
321 - : "=&r" (__val), "=m" (*mem) \
322 - : "b" (mem), "r" (value), "m" (*mem) \
323 + : "=&r" (__val), "+Z" (*(mem)) \
325 : "cr0", "memory"); \
328 @@ -162,11 +162,11 @@
330 __typeof (*mem) __val; \
331 __asm __volatile (__ARCH_REL_INSTR "\n" \
332 - "1: ldarx %0,0,%2" MUTEX_HINT_REL "\n" \
333 - " stdcx. %3,0,%2\n" \
334 + "1: ldarx %0,%y1" MUTEX_HINT_REL "\n" \
335 + " stdcx. %2,%y1\n" \
337 - : "=&r" (__val), "=m" (*mem) \
338 - : "b" (mem), "r" (value), "m" (*mem) \
339 + : "=&r" (__val), "+Z" (*(mem)) \
341 : "cr0", "memory"); \
344 @@ -174,12 +174,12 @@
345 #define __arch_atomic_exchange_and_add_64(mem, value) \
347 __typeof (*mem) __val, __tmp; \
348 - __asm __volatile ("1: ldarx %0,0,%3\n" \
349 - " add %1,%0,%4\n" \
350 - " stdcx. %1,0,%3\n" \
351 + __asm __volatile ("1: ldarx %0,%y2\n" \
352 + " add %1,%0,%3\n" \
353 + " stdcx. %1,%y2\n" \
355 - : "=&b" (__val), "=&r" (__tmp), "=m" (*mem) \
356 - : "b" (mem), "r" (value), "m" (*mem) \
357 + : "=&b" (__val), "=&r" (__tmp), "+Z" (*(mem)) \
359 : "cr0", "memory"); \
362 @@ -187,12 +187,12 @@
363 #define __arch_atomic_increment_val_64(mem) \
365 __typeof (*(mem)) __val; \
366 - __asm __volatile ("1: ldarx %0,0,%2\n" \
367 + __asm __volatile ("1: ldarx %0,%y1\n" \
369 - " stdcx. %0,0,%2\n" \
370 + " stdcx. %0,%y1\n" \
372 - : "=&b" (__val), "=m" (*mem) \
373 - : "b" (mem), "m" (*mem) \
374 + : "=&b" (__val), "+Z" (*(mem)) \
376 : "cr0", "memory"); \
379 @@ -200,27 +200,27 @@
380 #define __arch_atomic_decrement_val_64(mem) \
382 __typeof (*(mem)) __val; \
383 - __asm __volatile ("1: ldarx %0,0,%2\n" \
384 + __asm __volatile ("1: ldarx %0,%y1\n" \
386 - " stdcx. %0,0,%2\n" \
387 + " stdcx. %0,%y1\n" \
389 - : "=&b" (__val), "=m" (*mem) \
390 - : "b" (mem), "m" (*mem) \
391 + : "=&b" (__val), "+Z" (*(mem)) \
393 : "cr0", "memory"); \
397 #define __arch_atomic_decrement_if_positive_64(mem) \
398 ({ int __val, __tmp; \
399 - __asm __volatile ("1: ldarx %0,0,%3\n" \
400 + __asm __volatile ("1: ldarx %0,%y2\n" \
404 - " stdcx. %1,0,%3\n" \
405 + " stdcx. %1,%y2\n" \
407 "2: " __ARCH_ACQ_INSTR \
408 - : "=&b" (__val), "=&r" (__tmp), "=m" (*mem) \
409 - : "b" (mem), "m" (*mem) \
410 + : "=&b" (__val), "=&r" (__tmp), "+Z" (*(mem)) \
412 : "cr0", "memory"); \