x86: Add memory modify constraints to xchg() and cmpxchg()

xchg() and cmpxchg() modify their memory operands, not merely read
them.  For some versions of gcc, the "memory" clobber has apparently
been enough to deal with this, but not for all of them.
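
As a minimal standalone sketch (hypothetical helper names, not the
kernel macros themselves), the old and new constraint styles for a
register/memory exchange look like this:

	/* Old style: *ptr appears only as an input ("m"); gcc is told
	 * the asm reads the location, and only the "memory" clobber
	 * hints that it might also be written.
	 */
	static inline unsigned long xchg_old(volatile unsigned long *ptr,
					     unsigned long x)
	{
		asm volatile("xchg %0,%1"
			     : "=r" (x)
			     : "m" (*ptr), "0" (x)
			     : "memory");
		return x;
	}

	/* New style: "+m" makes *ptr an explicit read/write operand,
	 * so every gcc version knows the instruction modifies it.
	 */
	static inline unsigned long xchg_new(volatile unsigned long *ptr,
					     unsigned long x)
	{
		asm volatile("xchg %0,%1"
			     : "=r" (x), "+m" (*ptr)
			     : "0" (x)
			     : "memory");
		return x;
	}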

Originally-by: Linus Torvalds <torvalds@linux-foundation.org>
Signed-off-by: H. Peter Anvin <hpa@zytor.com>
Cc: Glauber Costa <glommer@redhat.com>
Cc: Avi Kivity <avi@redhat.com>
Cc: Peter Palfrader <peter@palfrader.org>
Cc: Greg KH <gregkh@suse.de>
Cc: Alan Cox <alan@lxorguk.ukuu.org.uk>
Cc: Zachary Amsden <zamsden@redhat.com>
Cc: Marcelo Tosatti <mtosatti@redhat.com>
Cc: <stable@kernel.org>
LKML-Reference: <4C4F7277.8050306@zytor.com>
commit 113fc5a6e8
parent fc0f5ac8fe
Author: H. Peter Anvin <hpa@zytor.com>
Date:   2010-07-27 17:01:49 -07:00

 2 files changed, 54 insertions(+), 54 deletions(-)

diff --git a/arch/x86/include/asm/cmpxchg_32.h b/arch/x86/include/asm/cmpxchg_32.h
--- a/arch/x86/include/asm/cmpxchg_32.h
+++ b/arch/x86/include/asm/cmpxchg_32.h

@@ -27,20 +27,20 @@ struct __xchg_dummy {
 	switch (size) {						\
 	case 1:							\
 		asm volatile("xchgb %b0,%1"			\
-			     : "=q" (__x)			\
-			     : "m" (*__xg(ptr)), "0" (__x)	\
+			     : "=q" (__x), "+m" (*__xg(ptr))	\
+			     : "0" (__x)			\
 			     : "memory");			\
 		break;						\
 	case 2:							\
 		asm volatile("xchgw %w0,%1"			\
-			     : "=r" (__x)			\
-			     : "m" (*__xg(ptr)), "0" (__x)	\
+			     : "=r" (__x), "+m" (*__xg(ptr))	\
+			     : "0" (__x)			\
 			     : "memory");			\
 		break;						\
 	case 4:							\
 		asm volatile("xchgl %0,%1"			\
-			     : "=r" (__x)			\
-			     : "m" (*__xg(ptr)), "0" (__x)	\
+			     : "=r" (__x), "+m" (*__xg(ptr))	\
+			     : "0" (__x)			\
 			     : "memory");			\
 		break;						\
 	default:						\
@@ -70,14 +70,14 @@ static inline void __set_64bit(unsigned long long *ptr,
 				     unsigned int low, unsigned int high)
 {
 	asm volatile("\n1:\t"
-		     "movl (%0), %%eax\n\t"
-		     "movl 4(%0), %%edx\n\t"
-		     LOCK_PREFIX "cmpxchg8b (%0)\n\t"
+		     "movl (%1), %%eax\n\t"
+		     "movl 4(%1), %%edx\n\t"
+		     LOCK_PREFIX "cmpxchg8b (%1)\n\t"
 		     "jnz 1b"
-		     : /* no outputs */
-		     : "D"(ptr),
-		       "b"(low),
-		       "c"(high)
+		     : "=m" (*ptr)
+		     : "D" (ptr),
+		       "b" (low),
+		       "c" (high)
 		     : "ax", "dx", "memory");
 }
@@ -121,21 +121,21 @@ extern void __cmpxchg_wrong_size(void);
 	__typeof__(*(ptr)) __new = (new);			\
 	switch (size) {						\
 	case 1:							\
-		asm volatile(lock "cmpxchgb %b1,%2"		\
-			     : "=a"(__ret)			\
-			     : "q"(__new), "m"(*__xg(ptr)), "0"(__old) \
+		asm volatile(lock "cmpxchgb %b2,%1"		\
+			     : "=a" (__ret), "+m" (*__xg(ptr))	\
+			     : "q" (__new), "0" (__old)		\
 			     : "memory");			\
 		break;						\
 	case 2:							\
-		asm volatile(lock "cmpxchgw %w1,%2"		\
-			     : "=a"(__ret)			\
-			     : "r"(__new), "m"(*__xg(ptr)), "0"(__old) \
+		asm volatile(lock "cmpxchgw %w2,%1"		\
+			     : "=a" (__ret), "+m" (*__xg(ptr))	\
+			     : "r" (__new), "0" (__old)		\
 			     : "memory");			\
 		break;						\
 	case 4:							\
-		asm volatile(lock "cmpxchgl %1,%2"		\
-			     : "=a"(__ret)			\
-			     : "r"(__new), "m"(*__xg(ptr)), "0"(__old) \
+		asm volatile(lock "cmpxchgl %2,%1"		\
+			     : "=a" (__ret), "+m" (*__xg(ptr))	\
+			     : "r" (__new), "0" (__old)		\
 			     : "memory");			\
 		break;						\
 	default:						\
@@ -180,12 +180,12 @@ static inline unsigned long long __cmpxchg64(volatile void *ptr,
 					     unsigned long long new)
 {
 	unsigned long long prev;
-	asm volatile(LOCK_PREFIX "cmpxchg8b %3"
-		     : "=A"(prev)
-		     : "b"((unsigned long)new),
-		       "c"((unsigned long)(new >> 32)),
-		       "m"(*__xg(ptr)),
-		       "0"(old)
+	asm volatile(LOCK_PREFIX "cmpxchg8b %1"
+		     : "=A" (prev),
+		       "+m" (*__xg(ptr))
+		     : "b" ((unsigned long)new),
+		       "c" ((unsigned long)(new >> 32)),
+		       "0" (old)
 		     : "memory");
 	return prev;
 }
@@ -195,12 +195,12 @@ static inline unsigned long long __cmpxchg64_local(volatile void *ptr,
 					     unsigned long long new)
 {
 	unsigned long long prev;
-	asm volatile("cmpxchg8b %3"
-		     : "=A"(prev)
-		     : "b"((unsigned long)new),
-		       "c"((unsigned long)(new >> 32)),
-		       "m"(*__xg(ptr)),
-		       "0"(old)
+	asm volatile("cmpxchg8b %1"
+		     : "=A" (prev),
+		       "+m" (*__xg(ptr))
+		     : "b" ((unsigned long)new),
+		       "c" ((unsigned long)(new >> 32)),
+		       "0" (old)
 		     : "memory");
 	return prev;
 }

diff --git a/arch/x86/include/asm/cmpxchg_64.h b/arch/x86/include/asm/cmpxchg_64.h
--- a/arch/x86/include/asm/cmpxchg_64.h
+++ b/arch/x86/include/asm/cmpxchg_64.h

@@ -26,26 +26,26 @@ extern void __cmpxchg_wrong_size(void);
 	switch (size) {						\
 	case 1:							\
 		asm volatile("xchgb %b0,%1"			\
-			     : "=q" (__x)			\
-			     : "m" (*__xg(ptr)), "0" (__x)	\
+			     : "=q" (__x), "+m" (*__xg(ptr))	\
+			     : "0" (__x)			\
 			     : "memory");			\
 		break;						\
 	case 2:							\
 		asm volatile("xchgw %w0,%1"			\
-			     : "=r" (__x)			\
-			     : "m" (*__xg(ptr)), "0" (__x)	\
+			     : "=r" (__x), "+m" (*__xg(ptr))	\
+			     : "0" (__x)			\
 			     : "memory");			\
 		break;						\
 	case 4:							\
 		asm volatile("xchgl %k0,%1"			\
-			     : "=r" (__x)			\
-			     : "m" (*__xg(ptr)), "0" (__x)	\
+			     : "=r" (__x), "+m" (*__xg(ptr))	\
+			     : "0" (__x)			\
 			     : "memory");			\
 		break;						\
 	case 8:							\
 		asm volatile("xchgq %0,%1"			\
-			     : "=r" (__x)			\
-			     : "m" (*__xg(ptr)), "0" (__x)	\
+			     : "=r" (__x), "+m" (*__xg(ptr))	\
+			     : "0" (__x)			\
 			     : "memory");			\
 		break;						\
 	default:						\
@@ -71,27 +71,27 @@ extern void __cmpxchg_wrong_size(void);
 	__typeof__(*(ptr)) __new = (new);			\
 	switch (size) {						\
 	case 1:							\
-		asm volatile(lock "cmpxchgb %b1,%2"		\
-			     : "=a"(__ret)			\
-			     : "q"(__new), "m"(*__xg(ptr)), "0"(__old) \
+		asm volatile(lock "cmpxchgb %b2,%1"		\
+			     : "=a" (__ret), "+m" (*__xg(ptr))	\
+			     : "q" (__new), "0" (__old)		\
 			     : "memory");			\
 		break;						\
 	case 2:							\
-		asm volatile(lock "cmpxchgw %w1,%2"		\
-			     : "=a"(__ret)			\
-			     : "r"(__new), "m"(*__xg(ptr)), "0"(__old) \
+		asm volatile(lock "cmpxchgw %w2,%1"		\
+			     : "=a" (__ret), "+m" (*__xg(ptr))	\
+			     : "r" (__new), "0" (__old)		\
 			     : "memory");			\
 		break;						\
 	case 4:							\
-		asm volatile(lock "cmpxchgl %k1,%2"		\
-			     : "=a"(__ret)			\
-			     : "r"(__new), "m"(*__xg(ptr)), "0"(__old) \
+		asm volatile(lock "cmpxchgl %k2,%1"		\
+			     : "=a" (__ret), "+m" (*__xg(ptr))	\
+			     : "r" (__new), "0" (__old)		\
 			     : "memory");			\
 		break;						\
 	case 8:							\
-		asm volatile(lock "cmpxchgq %1,%2"		\
-			     : "=a"(__ret)			\
-			     : "r"(__new), "m"(*__xg(ptr)), "0"(__old) \
+		asm volatile(lock "cmpxchgq %2,%1"		\
+			     : "=a" (__ret), "+m" (*__xg(ptr))	\
+			     : "r" (__new), "0" (__old)		\
 			     : "memory");			\
 		break;						\
 	default:						\
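
For context, a minimal, hypothetical caller sketch (not kernel code)
shows what the "+m" operand buys: a cmpxchg-based retry loop only works
if gcc knows the asm may write the location, so the reload of *ctr on
each pass must really observe memory rather than a cached value.

	static inline unsigned long cmpxchg_sketch(volatile unsigned long *ptr,
						   unsigned long old,
						   unsigned long new)
	{
		unsigned long ret;

		asm volatile("lock; cmpxchg %2,%1"
			     : "=a" (ret), "+m" (*ptr)	/* read/write memory */
			     : "r" (new), "0" (old)
			     : "memory");
		return ret;
	}

	static inline void atomic_inc_sketch(volatile unsigned long *ctr)
	{
		unsigned long old;

		do {
			old = *ctr;	/* reloaded on every retry */
		} while (cmpxchg_sketch(ctr, old, old + 1) != old);
	}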