From 82328d0ef4a109e66264e70692fec273b201ee41 Mon Sep 17 00:00:00 2001
From: Eduardo Habkost
Date: Tue, 27 Nov 2007 18:08:41 -0200
Subject: [PATCH] x86_64: Add sync_cmpxchg()

Add a sync_cmpxchg() for x86_64, including a sync_cmpxchg64()
implementation. Needed for Xen's grant table implementation.

Signed-off-by: Eduardo Habkost
Signed-off-by: Mark McLoughlin
---
 include/asm-x86/cmpxchg_64.h |   47 ++++++++++++++++++++++++++++++++++++++++++
 1 files changed, 47 insertions(+), 0 deletions(-)

diff --git a/include/asm-x86/cmpxchg_64.h b/include/asm-x86/cmpxchg_64.h
index 56f5b41..1556f64 100644
--- a/include/asm-x86/cmpxchg_64.h
+++ b/include/asm-x86/cmpxchg_64.h
@@ -91,6 +91,45 @@ static inline unsigned long __cmpxchg(volatile void *ptr, unsigned long old,
 	return old;
 }
 
+/*
+ * Always use locked operations when touching memory shared with a
+ * hypervisor, since the system may be SMP even if the guest kernel
+ * isn't.
+ */
+static inline unsigned long __sync_cmpxchg(volatile void *ptr,
+					   unsigned long old,
+					   unsigned long new, int size)
+{
+	unsigned long prev;
+	switch (size) {
+	case 1:
+		__asm__ __volatile__("lock; cmpxchgb %b1,%2"
+				     : "=a"(prev)
+				     : "q"(new), "m"(*__xg(ptr)), "0"(old)
+				     : "memory");
+		return prev;
+	case 2:
+		__asm__ __volatile__("lock; cmpxchgw %w1,%2"
+				     : "=a"(prev)
+				     : "r"(new), "m"(*__xg(ptr)), "0"(old)
+				     : "memory");
+		return prev;
+	case 4:
+		__asm__ __volatile__("lock; cmpxchgl %k1,%2"
+				     : "=a"(prev)
+				     : "r"(new), "m"(*__xg(ptr)), "0"(old)
+				     : "memory");
+		return prev;
+	case 8:
+		__asm__ __volatile__("lock; cmpxchgq %1,%2"
+				     : "=a"(prev)
+				     : "r"(new), "m"(*__xg(ptr)), "0"(old)
+				     : "memory");
+		return prev;
+	}
+	return old;
+}
+
 static inline unsigned long __cmpxchg_local(volatile void *ptr,
 					    unsigned long old,
 					    unsigned long new, int size)
@@ -132,6 +171,14 @@ static inline unsigned long __cmpxchg_local(volatile void *ptr,
 	BUILD_BUG_ON(sizeof(*(ptr)) != 8);				\
 	cmpxchg((ptr), (o), (n));					\
 })
+#define sync_cmpxchg(ptr, o, n)						\
+	((__typeof__(*(ptr)))__sync_cmpxchg((ptr), (unsigned long)(o),	\
+					    (unsigned long)(n),		\
+					    sizeof(*(ptr))))
+#define sync_cmpxchg64(ptr, o, n)					\
+({									\
+	BUILD_BUG_ON(sizeof(*(ptr)) != 8);				\
+	sync_cmpxchg((ptr), (o), (n));					\
+})
 #define cmpxchg_local(ptr, o, n)					\
 	((__typeof__(*(ptr)))__cmpxchg_local((ptr), (unsigned long)(o),	\
 					     (unsigned long)(n),	\
 					     sizeof(*(ptr))))
-- 
1.5.4.1
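
[Editor's note, not part of the patch: a minimal usage sketch. The names
below (claim_shared_flag and its flags argument) are illustrative only
and assume the header above is reachable through the usual asm includes.
The point is that sync_cmpxchg() is used exactly like cmpxchg(), but it
keeps the lock prefix even on a !CONFIG_SMP guest kernel, since the
hypervisor or another domain may touch the same memory from a different
CPU. The 16-bit case matters because Xen grant-table entry flags are a
16-bit field.]

	/* Illustrative sketch: atomically set bit 0 of a flags word that
	 * lives in a page shared with the hypervisor. */
	static int claim_shared_flag(volatile u16 *flags)
	{
		u16 old = *flags;
		u16 prev;

		while (!(old & 1)) {
			/* Expands to "lock; cmpxchgw" via __sync_cmpxchg(),
			 * even on a uniprocessor guest. */
			prev = sync_cmpxchg(flags, old, old | 1);
			if (prev == old)
				return 1;	/* we set the bit */
			old = prev;		/* lost a race, retry */
		}
		return 0;			/* bit was already set */
	}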