2005-04-17 06:20:36 +08:00
|
|
|
#ifndef __ARCH_I386_ATOMIC__
|
|
|
|
#define __ARCH_I386_ATOMIC__
|
|
|
|
|
|
|
|
#include <linux/compiler.h>
|
|
|
|
#include <asm/processor.h>
|
2007-05-08 15:35:02 +08:00
|
|
|
#include <asm/cmpxchg.h>
|
2005-04-17 06:20:36 +08:00
|
|
|
|
|
|
|
/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc..
 */

/*
 * Make sure gcc doesn't try to be clever and move things around
 * on us. We need to use _exactly_ the address the user gave us,
 * not some alias that contains the same information.
 */
typedef struct { int counter; } atomic_t;

/* Static initializer: atomic_t a = ATOMIC_INIT(0); */
#define ATOMIC_INIT(i)	{ (i) }
|
|
|
|
|
|
|
|
/**
 * atomic_read - read atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically reads the value of @v.
 *
 * NOTE(review): this is a plain load with no volatile cast or barrier —
 * it relies on an aligned int access being atomic on i386; the compiler
 * is free to cache or reorder it.  Confirm callers add barriers where
 * ordering matters.
 */
#define atomic_read(v)		((v)->counter)
|
|
|
|
|
|
|
|
/**
 * atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.  A plain aligned int store;
 * no LOCK prefix is needed for a single store on i386.
 */
#define atomic_set(v,i)		(((v)->counter) = (i))
|
|
|
|
|
|
|
|
/**
 * atomic_add - add integer to atomic variable
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v.  No return value and no memory barrier.
 */
static __inline__ void atomic_add(int i, atomic_t *v)
{
	/* "+m": counter is both read and written by addl;
	 * "ir": @i may be an immediate or any register. */
	__asm__ __volatile__(
		LOCK_PREFIX "addl %1,%0"
		:"+m" (v->counter)
		:"ir" (i));
}
|
|
|
|
|
|
|
|
/**
 * atomic_sub - subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v.  No return value and no memory barrier.
 */
static __inline__ void atomic_sub(int i, atomic_t *v)
{
	/* Same constraint scheme as atomic_add: "+m" marks the counter
	 * as read-modify-write, "ir" allows an immediate or register. */
	__asm__ __volatile__(
		LOCK_PREFIX "subl %1,%0"
		:"+m" (v->counter)
		:"ir" (i));
}
|
|
|
|
|
|
|
|
/**
 * atomic_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
static __inline__ int atomic_sub_and_test(int i, atomic_t *v)
{
	unsigned char c;

	/* sete stores 1 in @c when the subl result was zero.  The
	 * "memory" clobber makes this a compiler barrier as well. */
	__asm__ __volatile__(
		LOCK_PREFIX "subl %2,%0; sete %1"
		:"+m" (v->counter), "=qm" (c)
		:"ir" (i) : "memory");
	return c;
}
|
|
|
|
|
|
|
|
/**
 * atomic_inc - increment atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1.  No return value and no memory barrier.
 */
static __inline__ void atomic_inc(atomic_t *v)
{
	/* incl is one byte shorter than addl $1; "+m" marks the
	 * counter as read-modify-write. */
	__asm__ __volatile__(
		LOCK_PREFIX "incl %0"
		:"+m" (v->counter));
}
|
|
|
|
|
|
|
|
/**
 * atomic_dec - decrement atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1.  No return value and no memory barrier.
 */
static __inline__ void atomic_dec(atomic_t *v)
{
	__asm__ __volatile__(
		LOCK_PREFIX "decl %0"
		:"+m" (v->counter));
}
|
|
|
|
|
|
|
|
/**
 * atomic_dec_and_test - decrement and test
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
static __inline__ int atomic_dec_and_test(atomic_t *v)
{
	unsigned char c;

	/* sete captures "result became zero"; "memory" clobber gives
	 * the compiler-barrier semantics expected of a *_and_test op. */
	__asm__ __volatile__(
		LOCK_PREFIX "decl %0; sete %1"
		:"+m" (v->counter), "=qm" (c)
		: : "memory");
	return c != 0;
}
|
|
|
|
|
|
|
|
/**
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
static __inline__ int atomic_inc_and_test(atomic_t *v)
{
	unsigned char c;

	/* Mirror image of atomic_dec_and_test: incl then sete. */
	__asm__ __volatile__(
		LOCK_PREFIX "incl %0; sete %1"
		:"+m" (v->counter), "=qm" (c)
		: : "memory");
	return c != 0;
}
|
|
|
|
|
|
|
|
/**
 * atomic_add_negative - add and test if negative
 * @v: pointer of type atomic_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
static __inline__ int atomic_add_negative(int i, atomic_t *v)
{
	unsigned char c;

	/* sets captures the sign flag of the addl result. */
	__asm__ __volatile__(
		LOCK_PREFIX "addl %2,%0; sets %1"
		:"+m" (v->counter), "=qm" (c)
		:"ir" (i) : "memory");
	return c;
}
|
|
|
|
|
|
|
|
/**
 * atomic_add_return - add integer and return
 * @v: pointer of type atomic_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns @i + @v
 */
static __inline__ int atomic_add_return(int i, atomic_t *v)
{
	int __i;
#ifdef CONFIG_M386
	unsigned long flags;
	/* Plain 386 CPUs (x86 <= 3) lack the xadd instruction. */
	if(unlikely(boot_cpu_data.x86 <= 3))
		goto no_xadd;
#endif
	/* Modern 486+ processor */
	__i = i;
	/* xaddl swaps %0 and %1 then stores the sum: afterwards @i holds
	 * the counter's previous value, so old + __i is the new value.
	 * counter is read AND written, hence the "+m" in/out operand; the
	 * "memory" clobber provides the compiler barrier that
	 * atomic_add_return is documented to be. */
	__asm__ __volatile__(
		LOCK_PREFIX "xaddl %0, %1"
		:"+r" (i), "+m" (v->counter)
		: : "memory");
	return i + __i;

#ifdef CONFIG_M386
no_xadd: /* Legacy 386 processor */
	/* Fall back to a non-xadd read/modify/write with interrupts
	 * disabled (UP-only safety on these CPUs). */
	local_irq_save(flags);
	__i = atomic_read(v);
	atomic_set(v, i + __i);
	local_irq_restore(flags);
	return i + __i;
#endif
}
|
|
|
|
|
2007-05-08 15:35:08 +08:00
|
|
|
/**
|
|
|
|
* atomic_sub_return - subtract integer and return
|
|
|
|
* @v: pointer of type atomic_t
|
|
|
|
* @i: integer value to subtract
|
|
|
|
*
|
|
|
|
* Atomically subtracts @i from @v and returns @v - @i
|
|
|
|
*/
|
2005-04-17 06:20:36 +08:00
|
|
|
static __inline__ int atomic_sub_return(int i, atomic_t *v)
|
|
|
|
{
|
|
|
|
return atomic_add_return(-i,v);
|
|
|
|
}
|
|
|
|
|
2007-05-08 15:34:20 +08:00
|
|
|
/* Compare-and-swap / exchange on the counter, built on the generic
 * cmpxchg/xchg helpers from <asm/cmpxchg.h>.  atomic_cmpxchg returns
 * the value previously in @v. */
#define atomic_cmpxchg(v, old, new) (cmpxchg(&((v)->counter), (old), (new)))
#define atomic_xchg(v, new) (xchg(&((v)->counter), (new)))
|
2005-11-14 08:07:24 +08:00
|
|
|
|
2005-11-14 08:07:25 +08:00
|
|
|
/**
|
2007-02-10 17:45:59 +08:00
|
|
|
* atomic_add_unless - add unless the number is already a given value
|
2005-11-14 08:07:25 +08:00
|
|
|
* @v: pointer of type atomic_t
|
|
|
|
* @a: the amount to add to v...
|
|
|
|
* @u: ...unless v is equal to u.
|
|
|
|
*
|
2007-02-10 17:45:59 +08:00
|
|
|
* Atomically adds @a to @v, so long as @v was not already @u.
|
2005-11-14 08:07:25 +08:00
|
|
|
* Returns non-zero if @v was not @u, and zero otherwise.
|
|
|
|
*/
|
2007-05-08 15:34:38 +08:00
|
|
|
static __inline__ int atomic_add_unless(atomic_t *v, int a, int u)
|
|
|
|
{
|
|
|
|
int c, old;
|
|
|
|
c = atomic_read(v);
|
|
|
|
for (;;) {
|
|
|
|
if (unlikely(c == (u)))
|
|
|
|
break;
|
|
|
|
old = atomic_cmpxchg((v), c, c + (a));
|
|
|
|
if (likely(old == c))
|
|
|
|
break;
|
|
|
|
c = old;
|
|
|
|
}
|
|
|
|
return c != (u);
|
|
|
|
}
|
|
|
|
|
2005-11-14 08:07:25 +08:00
|
|
|
/* Increment @v unless it is zero; returns non-zero iff the increment
 * actually happened. */
#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)

/* Increment/decrement and return the new value. */
#define atomic_inc_return(v)  (atomic_add_return(1,v))
#define atomic_dec_return(v)  (atomic_sub_return(1,v))
|
|
|
|
|
|
|
|
/* These are x86-specific, used by some header files */
|
|
|
|
#define atomic_clear_mask(mask, addr) \
|
2006-03-23 18:59:32 +08:00
|
|
|
__asm__ __volatile__(LOCK_PREFIX "andl %0,%1" \
|
2005-04-17 06:20:36 +08:00
|
|
|
: : "r" (~(mask)),"m" (*addr) : "memory")
|
|
|
|
|
|
|
|
#define atomic_set_mask(mask, addr) \
|
2006-03-23 18:59:32 +08:00
|
|
|
__asm__ __volatile__(LOCK_PREFIX "orl %0,%1" \
|
2005-04-17 06:20:36 +08:00
|
|
|
: : "r" (mask),"m" (*(addr)) : "memory")
|
|
|
|
|
|
|
|
/* Atomic operations are already serializing on x86, so the
 * before/after-atomic hooks only need a compiler barrier. */
#define smp_mb__before_atomic_dec()	barrier()
#define smp_mb__after_atomic_dec()	barrier()
#define smp_mb__before_atomic_inc()	barrier()
#define smp_mb__after_atomic_inc()	barrier()
|
|
|
|
|
2006-01-06 16:11:20 +08:00
|
|
|
#include <asm-generic/atomic.h>
|
2005-04-17 06:20:36 +08:00
|
|
|
#endif
|