/*
 * criu/arch/x86/include/asm/atomic.h
 * From the mirror of https://github.com/checkpoint-restore/criu
 */
#ifndef __CR_ATOMIC_H__
#define __CR_ATOMIC_H__

/* Project header; presumably provides xadd() used by atomic_add_return(). */
#include "asm/cmpxchg.h"

/*
 * Prepended to read-modify-write instructions below so they execute
 * atomically with respect to other CPUs (x86 "lock" prefix).
 */
#define LOCK_PREFIX "\n\tlock; "
/*
 * Atomic counter type. The value lives in a plain int; cross-CPU
 * modifications must go through the locked-instruction helpers below.
 */
typedef struct {
	int counter;
} atomic_t;

/* Static initializer: atomic_t a = ATOMIC_INIT(0); */
#define ATOMIC_INIT(i) { (i) }

/*
 * Read the current value of @v. The load goes through a
 * volatile-qualified pointer so the compiler performs a fresh read of
 * memory on every call instead of reusing a cached value.
 */
static inline int atomic_read(const atomic_t *v)
{
	const volatile int *p = &v->counter;

	return *p;
}
/* Store @i into @v. Plain store; no locked instruction or barrier here. */
static inline void atomic_set(atomic_t *v, int i)
{
	v->counter = i;
}
/*
 * Atomically add @i to @v (locked addl). No return value.
 * "+m" marks the counter as read and written in memory; "ir" lets @i be
 * an immediate or a register.
 */
static inline void atomic_add(int i, atomic_t *v)
{
	asm volatile(LOCK_PREFIX "addl %1,%0"
		     : "+m" (v->counter)
		     : "ir" (i));
}
/* Atomically subtract @i from @v (locked subl). No return value. */
static inline void atomic_sub(int i, atomic_t *v)
{
	asm volatile(LOCK_PREFIX "subl %1,%0"
		     : "+m" (v->counter)
		     : "ir" (i));
}
/* Atomically increment @v by one (locked incl). */
static inline void atomic_inc(atomic_t *v)
{
	asm volatile(LOCK_PREFIX "incl %0"
		     : "+m" (v->counter));
}
/* Atomically decrement @v by one (locked decl). */
static inline void atomic_dec(atomic_t *v)
{
	asm volatile(LOCK_PREFIX "decl %0"
		     : "+m" (v->counter));
}
/*
 * Atomically decrement @v and test the result.
 *
 * Returns non-zero iff the new value is 0: SETE captures the ZF set by
 * the locked decl. The "memory" clobber stops the compiler from
 * reordering other memory accesses across this operation.
 */
static inline int atomic_dec_and_test(atomic_t *v)
{
	unsigned char c;

	asm volatile(LOCK_PREFIX "decl %0; sete %1"
		     : "+m" (v->counter), "=qm" (c)
		     : : "memory");
	return c != 0;
}
/*
 * Atomically add @i to @v and return the NEW value.
 *
 * xadd() (from asm/cmpxchg.h) yields the value the counter held before
 * the addition, so @i is added back to reconstruct the new value.
 */
static inline int atomic_add_return(int i, atomic_t *v)
{
	return i + xadd(&v->counter, i);
}
/*
 * Atomically subtract @i from @v and return the new value, implemented
 * as an add of the negation.
 * NOTE(review): -i overflows (UB) for i == INT_MIN — assumed callers
 * never pass it; confirm if this helper gets arbitrary input.
 */
static inline int atomic_sub_return(int i, atomic_t *v)
{
	return atomic_add_return(-i, v);
}
/* Increment/decrement @v by one, returning the new value. */
#define atomic_inc_return(v) (atomic_add_return(1, v))
#define atomic_dec_return(v) (atomic_sub_return(1, v))

#endif /* __CR_ATOMIC_H__ */